Diff of the two buildlogs: -- --- b1/build.log 2023-04-19 17:39:48.160093816 +0000 +++ b2/build.log 2023-04-19 17:51:45.604271341 +0000 @@ -1,6 +1,6 @@ I: pbuilder: network access will be disabled during build -I: Current time: Tue May 21 11:54:06 -12 2024 -I: pbuilder-time-stamp: 1716335646 +I: Current time: Thu Apr 20 07:39:50 +14 2023 +I: pbuilder-time-stamp: 1681925990 I: Building the build Environment I: extracting base tarball [/var/cache/pbuilder/bookworm-reproducible-base.tgz] I: copying local configuration @@ -16,7 +16,7 @@ I: copying [./bison_3.8.2+dfsg.orig.tar.xz] I: copying [./bison_3.8.2+dfsg-1.debian.tar.xz] I: Extracting source -gpgv: Signature made Sat Oct 2 12:23:58 2021 -12 +gpgv: Signature made Sun Oct 3 14:23:58 2021 +14 gpgv: using RSA key A63A3F516EA2FCA2A2FB638D01B3A9952AAE4713 gpgv: issuer "cklin@debian.org" gpgv: Can't check signature: No public key @@ -29,135 +29,167 @@ dpkg-source: info: applying 02_parse_h_dependency I: using fakeroot in build. I: Installing the build-deps -I: user script /srv/workspace/pbuilder/1623204/tmp/hooks/D02_print_environment starting +I: user script /srv/workspace/pbuilder/1905680/tmp/hooks/D01_modify_environment starting +debug: Running on ionos1-amd64. +I: Changing host+domainname to test build reproducibility +I: Adding a custom variable just for the fun of it... +I: Changing /bin/sh to bash +'/bin/sh' -> '/bin/bash' +lrwxrwxrwx 1 root root 9 Apr 20 07:40 /bin/sh -> /bin/bash +I: Setting pbuilder2's login shell to /bin/bash +I: Setting pbuilder2's GECOS to second user,second room,second work-phone,second home-phone,second other +I: user script /srv/workspace/pbuilder/1905680/tmp/hooks/D01_modify_environment finished +I: user script /srv/workspace/pbuilder/1905680/tmp/hooks/D02_print_environment starting I: set - BUILDDIR='/build' - BUILDUSERGECOS='first user,first room,first work-phone,first home-phone,first other' - BUILDUSERNAME='pbuilder1' - BUILD_ARCH='amd64' - DEBIAN_FRONTEND='noninteractive' - DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=16' - DISTRIBUTION='bookworm' - HOME='/root' - HOST_ARCH='amd64' + BASH=/bin/sh + BASHOPTS=checkwinsize:cmdhist:complete_fullquote:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath + BASH_ALIASES=() + BASH_ARGC=() + BASH_ARGV=() + BASH_CMDS=() + BASH_LINENO=([0]="12" [1]="0") + BASH_LOADABLES_PATH=/usr/local/lib/bash:/usr/lib/bash:/opt/local/lib/bash:/usr/pkg/lib/bash:/opt/pkg/lib/bash:. 
+ BASH_SOURCE=([0]="/tmp/hooks/D02_print_environment" [1]="/tmp/hooks/D02_print_environment") + BASH_VERSINFO=([0]="5" [1]="2" [2]="15" [3]="1" [4]="release" [5]="x86_64-pc-linux-gnu") + BASH_VERSION='5.2.15(1)-release' + BUILDDIR=/build + BUILDUSERGECOS='second user,second room,second work-phone,second home-phone,second other' + BUILDUSERNAME=pbuilder2 + BUILD_ARCH=amd64 + DEBIAN_FRONTEND=noninteractive + DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=15' + DIRSTACK=() + DISTRIBUTION=bookworm + EUID=0 + FUNCNAME=([0]="Echo" [1]="main") + GROUPS=() + HOME=/root + HOSTNAME=i-capture-the-hostname + HOSTTYPE=x86_64 + HOST_ARCH=amd64 IFS=' ' - INVOCATION_ID='d0bb830bd03e4199bc71e6140577c2e6' - LANG='C' - LANGUAGE='en_US:en' - LC_ALL='C' - MAIL='/var/mail/root' - OPTIND='1' - PATH='/usr/sbin:/usr/bin:/sbin:/bin:/usr/games' - PBCURRENTCOMMANDLINEOPERATION='build' - PBUILDER_OPERATION='build' - PBUILDER_PKGDATADIR='/usr/share/pbuilder' - PBUILDER_PKGLIBDIR='/usr/lib/pbuilder' - PBUILDER_SYSCONFDIR='/etc' - PPID='1623204' - PS1='# ' - PS2='> ' + INVOCATION_ID=449804e9e3214a518d7ad1d52a202fcc + LANG=C + LANGUAGE=et_EE:et + LC_ALL=C + MACHTYPE=x86_64-pc-linux-gnu + MAIL=/var/mail/root + OPTERR=1 + OPTIND=1 + OSTYPE=linux-gnu + PATH=/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path + PBCURRENTCOMMANDLINEOPERATION=build + PBUILDER_OPERATION=build + PBUILDER_PKGDATADIR=/usr/share/pbuilder + PBUILDER_PKGLIBDIR=/usr/lib/pbuilder + PBUILDER_SYSCONFDIR=/etc + PIPESTATUS=([0]="0") + POSIXLY_CORRECT=y + PPID=1905680 PS4='+ ' - PWD='/' - SHELL='/bin/bash' - SHLVL='2' - SUDO_COMMAND='/usr/bin/timeout -k 18.1h 18h /usr/bin/ionice -c 3 /usr/bin/nice /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.xMwLsFQH/pbuilderrc_boPv --distribution bookworm --hookdir /etc/pbuilder/first-build-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/bookworm-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.xMwLsFQH/b1 --logfile b1/build.log bison_3.8.2+dfsg-1.dsc' - SUDO_GID='111' - SUDO_UID='106' - SUDO_USER='jenkins' - TERM='unknown' - TZ='/usr/share/zoneinfo/Etc/GMT+12' - USER='root' - _='/usr/bin/systemd-run' - http_proxy='http://85.184.249.68:3128' + PWD=/ + SHELL=/bin/bash + SHELLOPTS=braceexpand:errexit:hashall:interactive-comments:posix + SHLVL=3 + SUDO_COMMAND='/usr/bin/timeout -k 24.1h 24h /usr/bin/ionice -c 3 /usr/bin/nice -n 11 /usr/bin/unshare --uts -- /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.xMwLsFQH/pbuilderrc_GVEd --distribution bookworm --hookdir /etc/pbuilder/rebuild-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/bookworm-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.xMwLsFQH/b2 --logfile b2/build.log --extrapackages usrmerge bison_3.8.2+dfsg-1.dsc' + SUDO_GID=110 + SUDO_UID=105 + SUDO_USER=jenkins + TERM=unknown + TZ=/usr/share/zoneinfo/Etc/GMT-14 + UID=0 + USER=root + _='I: set' + http_proxy=http://78.137.99.97:3128 I: uname -a - Linux ionos15-amd64 6.1.0-0.deb11.5-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.1.12-1~bpo11+1 (2023-03-05) x86_64 GNU/Linux + Linux i-capture-the-hostname 5.10.0-21-amd64 #1 SMP Debian 5.10.162-1 (2023-01-21) x86_64 GNU/Linux I: ls -l /bin total 5632 - -rwxr-xr-x 1 root root 1265648 Feb 12 2023 bash - -rwxr-xr-x 3 root root 39224 Sep 18 2022 bunzip2 - -rwxr-xr-x 3 root root 39224 Sep 18 2022 bzcat - lrwxrwxrwx 1 root root 6 Sep 18 2022 bzcmp -> bzdiff - -rwxr-xr-x 1 root root 2225 Sep 18 2022 
bzdiff - lrwxrwxrwx 1 root root 6 Sep 18 2022 bzegrep -> bzgrep - -rwxr-xr-x 1 root root 4893 Nov 27 2021 bzexe - lrwxrwxrwx 1 root root 6 Sep 18 2022 bzfgrep -> bzgrep - -rwxr-xr-x 1 root root 3775 Sep 18 2022 bzgrep - -rwxr-xr-x 3 root root 39224 Sep 18 2022 bzip2 - -rwxr-xr-x 1 root root 14568 Sep 18 2022 bzip2recover - lrwxrwxrwx 1 root root 6 Sep 18 2022 bzless -> bzmore - -rwxr-xr-x 1 root root 1297 Sep 18 2022 bzmore - -rwxr-xr-x 1 root root 44016 Sep 20 2022 cat - -rwxr-xr-x 1 root root 68656 Sep 20 2022 chgrp - -rwxr-xr-x 1 root root 64496 Sep 20 2022 chmod - -rwxr-xr-x 1 root root 72752 Sep 20 2022 chown - -rwxr-xr-x 1 root root 151152 Sep 20 2022 cp - -rwxr-xr-x 1 root root 125640 Jan 5 2023 dash - -rwxr-xr-x 1 root root 121904 Sep 20 2022 date - -rwxr-xr-x 1 root root 89240 Sep 20 2022 dd - -rwxr-xr-x 1 root root 102200 Sep 20 2022 df - -rwxr-xr-x 1 root root 151344 Sep 20 2022 dir - -rwxr-xr-x 1 root root 88656 Mar 22 2023 dmesg - lrwxrwxrwx 1 root root 8 Dec 19 2022 dnsdomainname -> hostname - lrwxrwxrwx 1 root root 8 Dec 19 2022 domainname -> hostname - -rwxr-xr-x 1 root root 43856 Sep 20 2022 echo - -rwxr-xr-x 1 root root 41 Jan 24 2023 egrep - -rwxr-xr-x 1 root root 35664 Sep 20 2022 false - -rwxr-xr-x 1 root root 41 Jan 24 2023 fgrep - -rwxr-xr-x 1 root root 85600 Mar 22 2023 findmnt - -rwsr-xr-x 1 root root 35128 Mar 22 2023 fusermount - -rwxr-xr-x 1 root root 203152 Jan 24 2023 grep - -rwxr-xr-x 2 root root 2346 Apr 9 2022 gunzip - -rwxr-xr-x 1 root root 6447 Apr 9 2022 gzexe - -rwxr-xr-x 1 root root 98136 Apr 9 2022 gzip - -rwxr-xr-x 1 root root 22680 Dec 19 2022 hostname - -rwxr-xr-x 1 root root 72824 Sep 20 2022 ln - -rwxr-xr-x 1 root root 53024 Mar 23 2023 login - -rwxr-xr-x 1 root root 151344 Sep 20 2022 ls - -rwxr-xr-x 1 root root 207168 Mar 22 2023 lsblk - -rwxr-xr-x 1 root root 97552 Sep 20 2022 mkdir - -rwxr-xr-x 1 root root 72912 Sep 20 2022 mknod - -rwxr-xr-x 1 root root 43952 Sep 20 2022 mktemp - -rwxr-xr-x 1 root root 59712 Mar 22 2023 more - -rwsr-xr-x 1 root root 59704 Mar 22 2023 mount - -rwxr-xr-x 1 root root 18744 Mar 22 2023 mountpoint - -rwxr-xr-x 1 root root 142968 Sep 20 2022 mv - lrwxrwxrwx 1 root root 8 Dec 19 2022 nisdomainname -> hostname - lrwxrwxrwx 1 root root 14 Apr 2 2023 pidof -> /sbin/killall5 - -rwxr-xr-x 1 root root 43952 Sep 20 2022 pwd - lrwxrwxrwx 1 root root 4 Feb 12 2023 rbash -> bash - -rwxr-xr-x 1 root root 52112 Sep 20 2022 readlink - -rwxr-xr-x 1 root root 72752 Sep 20 2022 rm - -rwxr-xr-x 1 root root 56240 Sep 20 2022 rmdir - -rwxr-xr-x 1 root root 27560 Nov 2 2022 run-parts - -rwxr-xr-x 1 root root 126424 Jan 5 2023 sed - lrwxrwxrwx 1 root root 4 Jan 5 2023 sh -> dash - -rwxr-xr-x 1 root root 43888 Sep 20 2022 sleep - -rwxr-xr-x 1 root root 85008 Sep 20 2022 stty - -rwsr-xr-x 1 root root 72000 Mar 22 2023 su - -rwxr-xr-x 1 root root 39824 Sep 20 2022 sync - -rwxr-xr-x 1 root root 531984 Apr 6 2023 tar - -rwxr-xr-x 1 root root 14520 Nov 2 2022 tempfile - -rwxr-xr-x 1 root root 109616 Sep 20 2022 touch - -rwxr-xr-x 1 root root 35664 Sep 20 2022 true - -rwxr-xr-x 1 root root 14568 Mar 22 2023 ulockmgr_server - -rwsr-xr-x 1 root root 35128 Mar 22 2023 umount - -rwxr-xr-x 1 root root 43888 Sep 20 2022 uname - -rwxr-xr-x 2 root root 2346 Apr 9 2022 uncompress - -rwxr-xr-x 1 root root 151344 Sep 20 2022 vdir - -rwxr-xr-x 1 root root 72024 Mar 22 2023 wdctl - lrwxrwxrwx 1 root root 8 Dec 19 2022 ypdomainname -> hostname - -rwxr-xr-x 1 root root 1984 Apr 9 2022 zcat - -rwxr-xr-x 1 root root 1678 Apr 9 2022 zcmp - -rwxr-xr-x 1 root 
root 6460 Apr 9 2022 zdiff - -rwxr-xr-x 1 root root 29 Apr 9 2022 zegrep - -rwxr-xr-x 1 root root 29 Apr 9 2022 zfgrep - -rwxr-xr-x 1 root root 2081 Apr 9 2022 zforce - -rwxr-xr-x 1 root root 8103 Apr 9 2022 zgrep - -rwxr-xr-x 1 root root 2206 Apr 9 2022 zless - -rwxr-xr-x 1 root root 1842 Apr 9 2022 zmore - -rwxr-xr-x 1 root root 4577 Apr 9 2022 znew -I: user script /srv/workspace/pbuilder/1623204/tmp/hooks/D02_print_environment finished + -rwxr-xr-x 1 root root 1265648 Feb 13 10:05 bash + -rwxr-xr-x 3 root root 39224 Sep 19 2022 bunzip2 + -rwxr-xr-x 3 root root 39224 Sep 19 2022 bzcat + lrwxrwxrwx 1 root root 6 Sep 19 2022 bzcmp -> bzdiff + -rwxr-xr-x 1 root root 2225 Sep 19 2022 bzdiff + lrwxrwxrwx 1 root root 6 Sep 19 2022 bzegrep -> bzgrep + -rwxr-xr-x 1 root root 4893 Nov 28 2021 bzexe + lrwxrwxrwx 1 root root 6 Sep 19 2022 bzfgrep -> bzgrep + -rwxr-xr-x 1 root root 3775 Sep 19 2022 bzgrep + -rwxr-xr-x 3 root root 39224 Sep 19 2022 bzip2 + -rwxr-xr-x 1 root root 14568 Sep 19 2022 bzip2recover + lrwxrwxrwx 1 root root 6 Sep 19 2022 bzless -> bzmore + -rwxr-xr-x 1 root root 1297 Sep 19 2022 bzmore + -rwxr-xr-x 1 root root 44016 Sep 21 2022 cat + -rwxr-xr-x 1 root root 68656 Sep 21 2022 chgrp + -rwxr-xr-x 1 root root 64496 Sep 21 2022 chmod + -rwxr-xr-x 1 root root 72752 Sep 21 2022 chown + -rwxr-xr-x 1 root root 151152 Sep 21 2022 cp + -rwxr-xr-x 1 root root 125640 Jan 6 03:20 dash + -rwxr-xr-x 1 root root 121904 Sep 21 2022 date + -rwxr-xr-x 1 root root 89240 Sep 21 2022 dd + -rwxr-xr-x 1 root root 102200 Sep 21 2022 df + -rwxr-xr-x 1 root root 151344 Sep 21 2022 dir + -rwxr-xr-x 1 root root 88656 Mar 24 00:02 dmesg + lrwxrwxrwx 1 root root 8 Dec 20 03:33 dnsdomainname -> hostname + lrwxrwxrwx 1 root root 8 Dec 20 03:33 domainname -> hostname + -rwxr-xr-x 1 root root 43856 Sep 21 2022 echo + -rwxr-xr-x 1 root root 41 Jan 25 04:43 egrep + -rwxr-xr-x 1 root root 35664 Sep 21 2022 false + -rwxr-xr-x 1 root root 41 Jan 25 04:43 fgrep + -rwxr-xr-x 1 root root 85600 Mar 24 00:02 findmnt + -rwsr-xr-x 1 root root 35128 Mar 23 22:35 fusermount + -rwxr-xr-x 1 root root 203152 Jan 25 04:43 grep + -rwxr-xr-x 2 root root 2346 Apr 10 2022 gunzip + -rwxr-xr-x 1 root root 6447 Apr 10 2022 gzexe + -rwxr-xr-x 1 root root 98136 Apr 10 2022 gzip + -rwxr-xr-x 1 root root 22680 Dec 20 03:33 hostname + -rwxr-xr-x 1 root root 72824 Sep 21 2022 ln + -rwxr-xr-x 1 root root 53024 Mar 24 02:40 login + -rwxr-xr-x 1 root root 151344 Sep 21 2022 ls + -rwxr-xr-x 1 root root 207168 Mar 24 00:02 lsblk + -rwxr-xr-x 1 root root 97552 Sep 21 2022 mkdir + -rwxr-xr-x 1 root root 72912 Sep 21 2022 mknod + -rwxr-xr-x 1 root root 43952 Sep 21 2022 mktemp + -rwxr-xr-x 1 root root 59712 Mar 24 00:02 more + -rwsr-xr-x 1 root root 59704 Mar 24 00:02 mount + -rwxr-xr-x 1 root root 18744 Mar 24 00:02 mountpoint + -rwxr-xr-x 1 root root 142968 Sep 21 2022 mv + lrwxrwxrwx 1 root root 8 Dec 20 03:33 nisdomainname -> hostname + lrwxrwxrwx 1 root root 14 Apr 3 20:25 pidof -> /sbin/killall5 + -rwxr-xr-x 1 root root 43952 Sep 21 2022 pwd + lrwxrwxrwx 1 root root 4 Feb 13 10:05 rbash -> bash + -rwxr-xr-x 1 root root 52112 Sep 21 2022 readlink + -rwxr-xr-x 1 root root 72752 Sep 21 2022 rm + -rwxr-xr-x 1 root root 56240 Sep 21 2022 rmdir + -rwxr-xr-x 1 root root 27560 Nov 3 06:31 run-parts + -rwxr-xr-x 1 root root 126424 Jan 6 09:55 sed + lrwxrwxrwx 1 root root 9 Apr 20 07:40 sh -> /bin/bash + -rwxr-xr-x 1 root root 43888 Sep 21 2022 sleep + -rwxr-xr-x 1 root root 85008 Sep 21 2022 stty + -rwsr-xr-x 1 root root 72000 Mar 24 00:02 su + 
-rwxr-xr-x 1 root root 39824 Sep 21 2022 sync + -rwxr-xr-x 1 root root 531984 Apr 7 04:25 tar + -rwxr-xr-x 1 root root 14520 Nov 3 06:31 tempfile + -rwxr-xr-x 1 root root 109616 Sep 21 2022 touch + -rwxr-xr-x 1 root root 35664 Sep 21 2022 true + -rwxr-xr-x 1 root root 14568 Mar 23 22:35 ulockmgr_server + -rwsr-xr-x 1 root root 35128 Mar 24 00:02 umount + -rwxr-xr-x 1 root root 43888 Sep 21 2022 uname + -rwxr-xr-x 2 root root 2346 Apr 10 2022 uncompress + -rwxr-xr-x 1 root root 151344 Sep 21 2022 vdir + -rwxr-xr-x 1 root root 72024 Mar 24 00:02 wdctl + lrwxrwxrwx 1 root root 8 Dec 20 03:33 ypdomainname -> hostname + -rwxr-xr-x 1 root root 1984 Apr 10 2022 zcat + -rwxr-xr-x 1 root root 1678 Apr 10 2022 zcmp + -rwxr-xr-x 1 root root 6460 Apr 10 2022 zdiff + -rwxr-xr-x 1 root root 29 Apr 10 2022 zegrep + -rwxr-xr-x 1 root root 29 Apr 10 2022 zfgrep + -rwxr-xr-x 1 root root 2081 Apr 10 2022 zforce + -rwxr-xr-x 1 root root 8103 Apr 10 2022 zgrep + -rwxr-xr-x 1 root root 2206 Apr 10 2022 zless + -rwxr-xr-x 1 root root 1842 Apr 10 2022 zmore + -rwxr-xr-x 1 root root 4577 Apr 10 2022 znew +I: user script /srv/workspace/pbuilder/1905680/tmp/hooks/D02_print_environment finished -> Attempting to satisfy build-dependencies -> Creating pbuilder-satisfydepends-dummy package Package: pbuilder-satisfydepends-dummy @@ -234,7 +266,7 @@ Get: 31 http://deb.debian.org/debian bookworm/main amd64 po-debconf all 1.0.21+nmu1 [248 kB] Get: 32 http://deb.debian.org/debian bookworm/main amd64 debhelper all 13.11.4 [942 kB] Get: 33 http://deb.debian.org/debian bookworm/main amd64 help2man amd64 1.49.3 [198 kB] -Fetched 19.4 MB in 0s (73.5 MB/s) +Fetched 19.4 MB in 0s (60.0 MB/s) debconf: delaying package configuration, since apt-utils is not installed Selecting previously unselected package m4. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 19596 files and directories currently installed.) @@ -383,10 +415,17 @@ Reading package lists... Building dependency tree... Reading state information... +usrmerge is already the newest version (35). fakeroot is already the newest version (1.31-1.2). 0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded. I: Building the package -I: Running cd /build/bison-3.8.2+dfsg/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-genchanges -S > ../bison_3.8.2+dfsg-1_source.changes +I: user script /srv/workspace/pbuilder/1905680/tmp/hooks/A99_set_merged_usr starting +Re-configuring usrmerge... +removed '/etc/unsupported-skip-usrmerge-conversion' +The system has been successfully converted. 
+I: user script /srv/workspace/pbuilder/1905680/tmp/hooks/A99_set_merged_usr finished +hostname: Name or service not known +I: Running cd /build/bison-3.8.2+dfsg/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-genchanges -S > ../bison_3.8.2+dfsg-1_source.changes dpkg-buildpackage: info: source package bison dpkg-buildpackage: info: source version 2:3.8.2+dfsg-1 dpkg-buildpackage: info: source distribution unstable @@ -431,7 +470,7 @@ ./configure --build=x86_64-linux-gnu --prefix=/usr --includedir=\${prefix}/include --mandir=\${prefix}/share/man --infodir=\${prefix}/share/info --sysconfdir=/etc --localstatedir=/var --disable-option-checking --disable-silent-rules --libdir=\${prefix}/lib/x86_64-linux-gnu --runstatedir=/run --disable-maintainer-mode --disable-dependency-tracking --disable-silent-rules checking for a BSD-compatible install... /usr/bin/install -c checking whether build environment is sane... yes -checking for a race-free mkdir -p... /bin/mkdir -p +checking for a race-free mkdir -p... /usr/bin/mkdir -p checking for gawk... no checking for mawk... mawk checking whether make sets $(MAKE)... yes @@ -495,8 +534,8 @@ checking build system type... x86_64-pc-linux-gnu checking host system type... x86_64-pc-linux-gnu checking how to run the C preprocessor... gcc -E -checking for grep that handles long lines and -e... /bin/grep -checking for egrep... /bin/grep -E +checking for grep that handles long lines and -e... /usr/bin/grep +checking for egrep... /usr/bin/grep -E checking for Minix Amsterdam compiler... no checking for ar... ar checking for ranlib... ranlib @@ -507,7 +546,7 @@ checking for shared library run path origin... done checking 32-bit host C ABI... no checking for ELF binary format... yes -checking for the common suffixes of directories in the library search path... lib,lib,lib +checking for the common suffixes of directories in the library search path... lib,lib,lib64 checking for libtextstyle... no checking for inline... inline checking for tcdrain... yes @@ -694,7 +733,7 @@ checking for pthread_kill in -lpthread... yes checking whether POSIX threads API is available... yes checking for multithread API to use... posix -checking for a sed that does not truncate output... /bin/sed +checking for a sed that does not truncate output... /usr/bin/sed checking whether NAN macro works... yes checking whether HUGE_VAL works... yes checking for mbstate_t... yes @@ -993,14 +1032,17 @@ /build/bison-3.8.2+dfsg/src/getargs.c make[1]: Leaving directory '/build/bison-3.8.2+dfsg' dh_auto_build - make -j16 + make -j15 make[1]: Entering directory '/build/bison-3.8.2+dfsg' rm -f examples/c/reccalc/scan.stamp examples/c/reccalc/scan.stamp.tmp +/usr/bin/mkdir -p examples/c/reccalc rm -f lib/alloca.h-t lib/alloca.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''HAVE_ALLOCA_H''@|1|g' < ./lib/alloca.in.h; \ } > lib/alloca.h-t && \ mv -f lib/alloca.h-t lib/alloca.h +touch examples/c/reccalc/scan.stamp.tmp +flex -oexamples/c/reccalc/scan.c --header=examples/c/reccalc/scan.h ./examples/c/reccalc/scan.l rm -f lib/configmake.h-t && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! 
*/'; \ echo '#if HAVE_WINSOCK2_H'; \ @@ -1036,7 +1078,6 @@ echo '#define PKGLIBEXECDIR "/usr/libexec/bison"'; \ } | sed '/""/d' > lib/configmake.h-t && \ mv -f lib/configmake.h-t lib/configmake.h -/bin/mkdir -p examples/c/reccalc rm -f lib/fcntl.h-t lib/fcntl.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1081,7 +1122,6 @@ < ./lib/iconv.in.h; \ } > lib/iconv.h-t && \ mv lib/iconv.h-t lib/iconv.h -touch examples/c/reccalc/scan.stamp.tmp rm -f lib/inttypes.h-t lib/inttypes.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's/@''HAVE_INTTYPES_H''@/1/g' \ @@ -1117,6 +1157,7 @@ cat ./lib/textstyle.in.h; \ } > lib/textstyle.h-t && \ mv lib/textstyle.h-t lib/textstyle.h +mv examples/c/reccalc/scan.stamp.tmp examples/c/reccalc/scan.stamp rm -f lib/limits.h-t lib/limits.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1127,7 +1168,6 @@ < ./lib/limits.in.h; \ } > lib/limits.h-t && \ mv lib/limits.h-t lib/limits.h -flex -oexamples/c/reccalc/scan.c --header=examples/c/reccalc/scan.h ./examples/c/reccalc/scan.l rm -f lib/locale.h-t lib/locale.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1467,7 +1507,7 @@ < ./lib/sched.in.h; \ } > lib/sched.h-t && \ mv lib/sched.h-t lib/sched.h -/bin/mkdir -p lib/malloc +/usr/bin/mkdir -p lib/malloc rm -f lib/signal.h-t lib/signal.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1497,6 +1537,15 @@ < ./lib/signal.in.h; \ } > lib/signal.h-t && \ mv lib/signal.h-t lib/signal.h +rm -f lib/malloc/scratch_buffer.gl.h-t lib/malloc/scratch_buffer.gl.h && \ +{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ + sed -e 's|__always_inline|inline _GL_ATTRIBUTE_ALWAYS_INLINE|g' \ + -e 's|__glibc_likely|_GL_LIKELY|g' \ + -e 's|__glibc_unlikely|_GL_UNLIKELY|g' \ + -e '/libc_hidden_proto/d' \ + < ./lib/malloc/scratch_buffer.h; \ +} > lib/malloc/scratch_buffer.gl.h-t && \ +mv lib/malloc/scratch_buffer.gl.h-t lib/malloc/scratch_buffer.gl.h rm -f lib/spawn.h-t lib/spawn.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1545,15 +1594,6 @@ < ./lib/spawn.in.h; \ } > lib/spawn.h-t && \ mv lib/spawn.h-t lib/spawn.h -rm -f lib/malloc/scratch_buffer.gl.h-t lib/malloc/scratch_buffer.gl.h && \ -{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ - sed -e 's|__always_inline|inline _GL_ATTRIBUTE_ALWAYS_INLINE|g' \ - -e 's|__glibc_likely|_GL_LIKELY|g' \ - -e 's|__glibc_unlikely|_GL_UNLIKELY|g' \ - -e '/libc_hidden_proto/d' \ - < ./lib/malloc/scratch_buffer.h; \ -} > lib/malloc/scratch_buffer.gl.h-t && \ -mv lib/malloc/scratch_buffer.gl.h-t lib/malloc/scratch_buffer.gl.h rm -f lib/stdio.h-t lib/stdio.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */' && \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1905,9 +1945,7 @@ < ./lib/string.in.h; \ } > lib/string.h-t && \ mv lib/string.h-t lib/string.h -/bin/mkdir -p lib/sys -/bin/mkdir -p lib/sys -/bin/mkdir -p lib/sys +/usr/bin/mkdir -p lib/sys rm -f lib/sys/ioctl.h-t lib/sys/ioctl.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! 
*/'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1925,7 +1963,7 @@ < ./lib/sys_ioctl.in.h; \ } > lib/sys/ioctl.h-t && \ mv lib/sys/ioctl.h-t lib/sys/ioctl.h -mv examples/c/reccalc/scan.stamp.tmp examples/c/reccalc/scan.stamp +/usr/bin/mkdir -p lib/sys rm -f lib/sys/resource.h-t lib/sys/resource.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -1942,6 +1980,7 @@ < ./lib/sys_resource.in.h; \ } > lib/sys/resource.h-t && \ mv -f lib/sys/resource.h-t lib/sys/resource.h +/usr/bin/mkdir -p lib/sys rm -f lib/sys/stat.h-t lib/sys/stat.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -2000,8 +2039,8 @@ < ./lib/sys_stat.in.h; \ } > lib/sys/stat.h-t && \ mv lib/sys/stat.h-t lib/sys/stat.h -/bin/mkdir -p lib/sys -/bin/mkdir -p lib/sys +/usr/bin/mkdir -p lib/sys +/usr/bin/mkdir -p lib/sys rm -f lib/sys/time.h-t lib/sys/time.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -2038,8 +2077,33 @@ < ./lib/sys_times.in.h; \ } > lib/sys/times.h-t && \ mv lib/sys/times.h-t lib/sys/times.h -/bin/mkdir -p lib/sys -/bin/mkdir -p lib/sys +/usr/bin/mkdir -p lib/sys +rm -f lib/sys/types.h-t lib/sys/types.h && \ +{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ + sed -e 's|@''GUARD_PREFIX''@|GL|g' \ + -e 's|@''INCLUDE_NEXT''@|include_next|g' \ + -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ + -e 's|@''PRAGMA_COLUMNS''@||g' \ + -e 's|@''NEXT_SYS_TYPES_H''@||g' \ + -e 's|@''WINDOWS_64_BIT_OFF_T''@|0|g' \ + -e 's|@''WINDOWS_STAT_INODES''@|0|g' \ + < ./lib/sys_types.in.h; \ +} > lib/sys/types.h-t && \ +mv lib/sys/types.h-t lib/sys/types.h +/usr/bin/mkdir -p lib/sys +rm -f lib/sys/wait.h-t lib/sys/wait.h && \ +{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ + sed -e 's|@''GUARD_PREFIX''@|GL|g' \ + -e 's|@''INCLUDE_NEXT''@|include_next|g' \ + -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ + -e 's|@''PRAGMA_COLUMNS''@||g' \ + -e 's|@''NEXT_SYS_WAIT_H''@||g' \ + -e 's/@''GNULIB_WAITPID''@/1/g' \ + -e '/definitions of _GL_FUNCDECL_RPL/r ./lib/c++defs.h' \ + -e '/definition of _GL_WARN_ON_USE/r ./lib/warn-on-use.h' \ + < ./lib/sys_wait.in.h; \ +} > lib/sys/wait.h-t && \ +mv lib/sys/wait.h-t lib/sys/wait.h rm -f lib/termios.h-t lib/termios.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -2100,31 +2164,6 @@ < ./lib/time.in.h; \ } > lib/time.h-t && \ mv lib/time.h-t lib/time.h -rm -f lib/sys/types.h-t lib/sys/types.h && \ -{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ - sed -e 's|@''GUARD_PREFIX''@|GL|g' \ - -e 's|@''INCLUDE_NEXT''@|include_next|g' \ - -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ - -e 's|@''PRAGMA_COLUMNS''@||g' \ - -e 's|@''NEXT_SYS_TYPES_H''@||g' \ - -e 's|@''WINDOWS_64_BIT_OFF_T''@|0|g' \ - -e 's|@''WINDOWS_STAT_INODES''@|0|g' \ - < ./lib/sys_types.in.h; \ -} > lib/sys/types.h-t && \ -mv lib/sys/types.h-t lib/sys/types.h -rm -f lib/sys/wait.h-t lib/sys/wait.h && \ -{ echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! 
*/'; \ - sed -e 's|@''GUARD_PREFIX''@|GL|g' \ - -e 's|@''INCLUDE_NEXT''@|include_next|g' \ - -e 's|@''PRAGMA_SYSTEM_HEADER''@|#pragma GCC system_header|g' \ - -e 's|@''PRAGMA_COLUMNS''@||g' \ - -e 's|@''NEXT_SYS_WAIT_H''@||g' \ - -e 's/@''GNULIB_WAITPID''@/1/g' \ - -e '/definitions of _GL_FUNCDECL_RPL/r ./lib/c++defs.h' \ - -e '/definition of _GL_WARN_ON_USE/r ./lib/warn-on-use.h' \ - < ./lib/sys_wait.in.h; \ -} > lib/sys/wait.h-t && \ -mv lib/sys/wait.h-t lib/sys/wait.h rm -f lib/unistd.h-t lib/unistd.h && \ { echo '/* DO NOT EDIT! GENERATED AUTOMATICALLY! */'; \ sed -e 's|@''GUARD_PREFIX''@|GL|g' \ @@ -2499,9 +2538,84 @@ make[3]: Leaving directory '/build/bison-3.8.2+dfsg/gnulib-po' Making all in . make[3]: Entering directory '/build/bison-3.8.2+dfsg' +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-bitsetv.o `test -f 'lib/bitsetv.c' || echo './'`lib/bitsetv.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-ctype.o `test -f 'lib/c-ctype.c' || echo './'`lib/c-ctype.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-strcasecmp.o `test -f 'lib/c-strcasecmp.c' || echo './'`lib/c-strcasecmp.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-strncasecmp.o `test -f 'lib/c-strncasecmp.c' || echo './'`lib/c-strncasecmp.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-canonicalize.o `test -f 'lib/canonicalize.c' || echo './'`lib/canonicalize.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-careadlinkat.o `test -f 'lib/careadlinkat.c' || echo './'`lib/careadlinkat.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-cloexec.o `test -f 'lib/cloexec.c' || echo './'`lib/cloexec.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-close-stream.o `test -f 'lib/close-stream.c' || echo './'`lib/close-stream.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-closeout.o `test -f 'lib/closeout.c' || echo './'`lib/closeout.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-concat-filename.o `test -f 'lib/concat-filename.c' || echo './'`lib/concat-filename.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname.o `test -f 'lib/dirname.c' || echo './'`lib/dirname.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-basename.o `test -f 'lib/basename.c' || echo './'`lib/basename.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname-lgpl.o `test -f 'lib/dirname-lgpl.c' || echo './'`lib/dirname-lgpl.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-stripslash.o `test -f 'lib/stripslash.c' || echo './'`lib/stripslash.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-execute.o `test -f 'lib/execute.c' || echo './'`lib/execute.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-exitfail.o `test -f 'lib/exitfail.c' || echo './'`lib/exitfail.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fatal-signal.o `test -f 'lib/fatal-signal.c' || echo './'`lib/fatal-signal.c +lib/careadlinkat.c: In function 'careadlinkat': +lib/careadlinkat.c:178:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] + 178 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." + | ^~~~~~~ +lib/careadlinkat.c:179:5: warning: #warning "See ." [-Wcpp] + 179 | #warning "See ." + | ^~~~~~~ +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fd-safer-flag.o `test -f 'lib/fd-safer-flag.c' || echo './'`lib/fd-safer-flag.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dup-safer-flag.o `test -f 'lib/dup-safer-flag.c' || echo './'`lib/dup-safer-flag.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-file-set.o `test -f 'lib/file-set.c' || echo './'`lib/file-set.c +lib/careadlinkat.c:182:10: warning: function may return address of local variable [-Wreturn-local-addr] + 182 | return readlink_stk (fd, filename, buffer, buffer_size, alloc, + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + 183 | preadlinkat, stack_buf); + | ~~~~~~~~~~~~~~~~~~~~~~~ +lib/careadlinkat.c:181:8: note: declared here + 181 | char stack_buf[STACK_BUF_SIZE]; + | ^~~~~~~~~ +lib/canonicalize.c: In function 'canonicalize_filename_mode': +lib/canonicalize.c:484:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] + 484 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." + | ^~~~~~~ +lib/canonicalize.c:485:5: warning: #warning "See ." [-Wcpp] + 485 | #warning "See ." + | ^~~~~~~ +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-findprog-in.o `test -f 'lib/findprog-in.c' || echo './'`lib/findprog-in.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fopen-safer.o `test -f 'lib/fopen-safer.c' || echo './'`lib/fopen-safer.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fstrcmp.o `test -f 'lib/fstrcmp.c' || echo './'`lib/fstrcmp.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gethrxtime.o `test -f 'lib/gethrxtime.c' || echo './'`lib/gethrxtime.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-xtime.o `test -f 'lib/xtime.c' || echo './'`lib/xtime.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-getprogname.o `test -f 'lib/getprogname.c' || echo './'`lib/getprogname.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gettime.o `test -f 'lib/gettime.c' || echo './'`lib/gettime.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. 
-I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hard-locale.o `test -f 'lib/hard-locale.c' || echo './'`lib/hard-locale.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash.o `test -f 'lib/hash.c' || echo './'`lib/hash.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_hash_map.o `test -f 'lib/gl_hash_map.c' || echo './'`lib/gl_hash_map.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash-pjw.o `test -f 'lib/hash-pjw.c' || echo './'`lib/hash-pjw.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash-triple-simple.o `test -f 'lib/hash-triple-simple.c' || echo './'`lib/hash-triple-simple.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-ialloc.o `test -f 'lib/ialloc.c' || echo './'`lib/ialloc.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-integer_length.o `test -f 'lib/integer_length.c' || echo './'`lib/integer_length.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-integer_length_l.o `test -f 'lib/integer_length_l.c' || echo './'`lib/integer_length_l.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_linked_list.o `test -f 'lib/gl_linked_list.c' || echo './'`lib/gl_linked_list.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_list.o `test -f 'lib/gl_list.c' || echo './'`lib/gl_list.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-localcharset.o `test -f 'lib/localcharset.c' || echo './'`lib/localcharset.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. 
-I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_map.o `test -f 'lib/gl_map.c' || echo './'`lib/gl_map.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-math.o `test -f 'lib/math.c' || echo './'`lib/math.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbchar.o `test -f 'lib/mbchar.c' || echo './'`lib/mbchar.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbfile.o `test -f 'lib/mbfile.c' || echo './'`lib/mbfile.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbswidth.o `test -f 'lib/mbswidth.c' || echo './'`lib/mbswidth.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_oset.o `test -f 'lib/gl_oset.c' || echo './'`lib/gl_oset.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-pipe2.o `test -f 'lib/pipe2.c' || echo './'`lib/pipe2.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-pipe2-safer.o `test -f 'lib/pipe2-safer.c' || echo './'`lib/pipe2-safer.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-printf-frexp.o `test -f 'lib/printf-frexp.c' || echo './'`lib/printf-frexp.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-printf-frexpl.o `test -f 'lib/printf-frexpl.c' || echo './'`lib/printf-frexpl.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-progname.o `test -f 'lib/progname.c' || echo './'`lib/progname.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-quotearg.o `test -f 'lib/quotearg.c' || echo './'`lib/quotearg.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_rbtree_oset.o `test -f 'lib/gl_rbtree_oset.c' || echo './'`lib/gl_rbtree_oset.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_rbtreehash_list.o `test -f 'lib/gl_rbtreehash_list.c' || echo './'`lib/gl_rbtreehash_list.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-setlocale_null.o `test -f 'lib/setlocale_null.c' || echo './'`lib/setlocale_null.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-sig-handler.o `test -f 'lib/sig-handler.c' || echo './'`lib/sig-handler.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-spawn-pipe.o `test -f 'lib/spawn-pipe.c' || echo './'`lib/spawn-pipe.c +gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/glthread/libbison_a-threadlib.o `test -f 'lib/glthread/threadlib.c' || echo './'`lib/glthread/threadlib.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-timespec.o `test -f 'lib/timespec.c' || echo './'`lib/timespec.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-timevar.o `test -f 'lib/timevar.c' || echo './'`lib/timevar.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/glthread/libbison_a-tls.o `test -f 'lib/glthread/tls.c' || echo './'`lib/glthread/tls.c @@ -2602,86 +2716,11 @@ gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/bitset/libbison_a-table.o `test -f 'lib/bitset/table.c' || echo './'`lib/bitset/table.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/bitset/libbison_a-list.o `test -f 'lib/bitset/list.c' || echo './'`lib/bitset/list.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/bitset/libbison_a-vector.o `test -f 'lib/bitset/vector.c' || echo './'`lib/bitset/vector.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-bitsetv.o `test -f 'lib/bitsetv.c' || echo './'`lib/bitsetv.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-ctype.o `test -f 'lib/c-ctype.c' || echo './'`lib/c-ctype.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-strcasecmp.o `test -f 'lib/c-strcasecmp.c' || echo './'`lib/c-strcasecmp.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-c-strncasecmp.o `test -f 'lib/c-strncasecmp.c' || echo './'`lib/c-strncasecmp.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-canonicalize.o `test -f 'lib/canonicalize.c' || echo './'`lib/canonicalize.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-careadlinkat.o `test -f 'lib/careadlinkat.c' || echo './'`lib/careadlinkat.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-cloexec.o `test -f 'lib/cloexec.c' || echo './'`lib/cloexec.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-close-stream.o `test -f 'lib/close-stream.c' || echo './'`lib/close-stream.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-closeout.o `test -f 'lib/closeout.c' || echo './'`lib/closeout.c -lib/careadlinkat.c: In function 'careadlinkat': -lib/careadlinkat.c:178:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] - 178 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." 
- | ^~~~~~~ -lib/careadlinkat.c:179:5: warning: #warning "See ." [-Wcpp] - 179 | #warning "See ." - | ^~~~~~~ -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-concat-filename.o `test -f 'lib/concat-filename.c' || echo './'`lib/concat-filename.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname.o `test -f 'lib/dirname.c' || echo './'`lib/dirname.c -lib/careadlinkat.c:182:10: warning: function may return address of local variable [-Wreturn-local-addr] - 182 | return readlink_stk (fd, filename, buffer, buffer_size, alloc, - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - 183 | preadlinkat, stack_buf); - | ~~~~~~~~~~~~~~~~~~~~~~~ -lib/careadlinkat.c:181:8: note: declared here - 181 | char stack_buf[STACK_BUF_SIZE]; - | ^~~~~~~~~ -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-basename.o `test -f 'lib/basename.c' || echo './'`lib/basename.c -lib/canonicalize.c: In function 'canonicalize_filename_mode': -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dirname-lgpl.o `test -f 'lib/dirname-lgpl.c' || echo './'`lib/dirname-lgpl.c -lib/canonicalize.c:484:5: warning: #warning "GCC might issue a bogus -Wreturn-local-addr warning here." [-Wcpp] - 484 | #warning "GCC might issue a bogus -Wreturn-local-addr warning here." - | ^~~~~~~ -lib/canonicalize.c:485:5: warning: #warning "See ." [-Wcpp] - 485 | #warning "See ." - | ^~~~~~~ -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-stripslash.o `test -f 'lib/stripslash.c' || echo './'`lib/stripslash.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-execute.o `test -f 'lib/execute.c' || echo './'`lib/execute.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-exitfail.o `test -f 'lib/exitfail.c' || echo './'`lib/exitfail.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fatal-signal.o `test -f 'lib/fatal-signal.c' || echo './'`lib/fatal-signal.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fd-safer-flag.o `test -f 'lib/fd-safer-flag.c' || echo './'`lib/fd-safer-flag.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-dup-safer-flag.o `test -f 'lib/dup-safer-flag.c' || echo './'`lib/dup-safer-flag.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-file-set.o `test -f 'lib/file-set.c' || echo './'`lib/file-set.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-findprog-in.o `test -f 'lib/findprog-in.c' || echo './'`lib/findprog-in.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fopen-safer.o `test -f 'lib/fopen-safer.c' || echo './'`lib/fopen-safer.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-fstrcmp.o `test -f 'lib/fstrcmp.c' || echo './'`lib/fstrcmp.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gethrxtime.o `test -f 'lib/gethrxtime.c' || echo './'`lib/gethrxtime.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-xtime.o `test -f 'lib/xtime.c' || echo './'`lib/xtime.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-getprogname.o `test -f 'lib/getprogname.c' || echo './'`lib/getprogname.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gettime.o `test -f 'lib/gettime.c' || echo './'`lib/gettime.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hard-locale.o `test -f 'lib/hard-locale.c' || echo './'`lib/hard-locale.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash.o `test -f 'lib/hash.c' || echo './'`lib/hash.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_hash_map.o `test -f 'lib/gl_hash_map.c' || echo './'`lib/gl_hash_map.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash-pjw.o `test -f 'lib/hash-pjw.c' || echo './'`lib/hash-pjw.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-hash-triple-simple.o `test -f 'lib/hash-triple-simple.c' || echo './'`lib/hash-triple-simple.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-ialloc.o `test -f 'lib/ialloc.c' || echo './'`lib/ialloc.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-integer_length.o `test -f 'lib/integer_length.c' || echo './'`lib/integer_length.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-integer_length_l.o `test -f 'lib/integer_length_l.c' || echo './'`lib/integer_length_l.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_linked_list.o `test -f 'lib/gl_linked_list.c' || echo './'`lib/gl_linked_list.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_list.o `test -f 'lib/gl_list.c' || echo './'`lib/gl_list.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-localcharset.o `test -f 'lib/localcharset.c' || echo './'`lib/localcharset.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/glthread/libbison_a-lock.o `test -f 'lib/glthread/lock.c' || echo './'`lib/glthread/lock.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_map.o `test -f 'lib/gl_map.c' || echo './'`lib/gl_map.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-math.o `test -f 'lib/math.c' || echo './'`lib/math.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbchar.o `test -f 'lib/mbchar.c' || echo './'`lib/mbchar.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbfile.o `test -f 'lib/mbfile.c' || echo './'`lib/mbfile.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-mbswidth.o `test -f 'lib/mbswidth.c' || echo './'`lib/mbswidth.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_oset.o `test -f 'lib/gl_oset.c' || echo './'`lib/gl_oset.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-pipe2.o `test -f 'lib/pipe2.c' || echo './'`lib/pipe2.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-pipe2-safer.o `test -f 'lib/pipe2-safer.c' || echo './'`lib/pipe2-safer.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-printf-frexp.o `test -f 'lib/printf-frexp.c' || echo './'`lib/printf-frexp.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-printf-frexpl.o `test -f 'lib/printf-frexpl.c' || echo './'`lib/printf-frexpl.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-progname.o `test -f 'lib/progname.c' || echo './'`lib/progname.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-quotearg.o `test -f 'lib/quotearg.c' || echo './'`lib/quotearg.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_rbtree_oset.o `test -f 'lib/gl_rbtree_oset.c' || echo './'`lib/gl_rbtree_oset.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/libbison_a-gl_rbtreehash_list.o `test -f 'lib/gl_rbtreehash_list.c' || echo './'`lib/gl_rbtreehash_list.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/malloc/libbison_a-scratch_buffer_dupfree.o `test -f 'lib/malloc/scratch_buffer_dupfree.c' || echo './'`lib/malloc/scratch_buffer_dupfree.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/malloc/libbison_a-scratch_buffer_grow.o `test -f 'lib/malloc/scratch_buffer_grow.c' || echo './'`lib/malloc/scratch_buffer_grow.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/malloc/libbison_a-scratch_buffer_grow_preserve.o `test -f 'lib/malloc/scratch_buffer_grow_preserve.c' || echo './'`lib/malloc/scratch_buffer_grow_preserve.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/malloc/libbison_a-scratch_buffer_set_array_size.o `test -f 'lib/malloc/scratch_buffer_set_array_size.c' || echo './'`lib/malloc/scratch_buffer_set_array_size.c -gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/glthread/libbison_a-threadlib.o `test -f 'lib/glthread/threadlib.c' || echo './'`lib/glthread/threadlib.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/unistr/libbison_a-u8-mbtoucr.o `test -f 'lib/unistr/u8-mbtoucr.c' || echo './'`lib/unistr/u8-mbtoucr.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o lib/unistr/libbison_a-u8-uctomb.o `test -f 'lib/unistr/u8-uctomb.c' || echo './'`lib/unistr/u8-uctomb.c gcc -DEXEEXT=\"\" -I. -I./lib -I. -I./lib -DDEFAULT_TEXT_DOMAIN=\"bison-gnulib\" -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o lib/unistr/libbison_a-u8-uctomb-aux.o `test -f 'lib/unistr/u8-uctomb-aux.c' || echo './'`lib/unistr/u8-uctomb-aux.c @@ -2693,14 +2732,14 @@ ar cr lib/libbison.a lib/libbison_a-allocator.o lib/libbison_a-areadlink.o lib/libbison_a-argmatch.o lib/libbison_a-gl_array_list.o lib/libbison_a-basename-lgpl.o lib/libbison_a-binary-io.o lib/libbison_a-bitrotate.o lib/libbison_a-bitset.o lib/bitset/libbison_a-array.o lib/bitset/libbison_a-stats.o lib/bitset/libbison_a-table.o lib/bitset/libbison_a-list.o lib/bitset/libbison_a-vector.o lib/libbison_a-bitsetv.o lib/libbison_a-c-ctype.o lib/libbison_a-c-strcasecmp.o lib/libbison_a-c-strncasecmp.o lib/libbison_a-canonicalize.o lib/libbison_a-careadlinkat.o lib/libbison_a-cloexec.o lib/libbison_a-close-stream.o lib/libbison_a-closeout.o lib/libbison_a-concat-filename.o lib/libbison_a-dirname.o lib/libbison_a-basename.o lib/libbison_a-dirname-lgpl.o lib/libbison_a-stripslash.o lib/libbison_a-execute.o lib/libbison_a-exitfail.o lib/libbison_a-fatal-signal.o lib/libbison_a-fd-safer-flag.o lib/libbison_a-dup-safer-flag.o lib/libbison_a-file-set.o lib/libbison_a-findprog-in.o lib/libbison_a-fopen-safer.o lib/libbison_a-fstrcmp.o lib/libbison_a-gethrxtime.o lib/libbison_a-xtime.o lib/libbison_a-getprogname.o lib/libbison_a-gettime.o lib/libbison_a-hard-locale.o lib/libbison_a-hash.o lib/libbison_a-gl_hash_map.o lib/libbison_a-hash-pjw.o lib/libbison_a-hash-triple-simple.o lib/libbison_a-ialloc.o lib/libbison_a-integer_length.o lib/libbison_a-integer_length_l.o lib/libbison_a-gl_linked_list.o lib/libbison_a-gl_list.o lib/libbison_a-localcharset.o lib/glthread/libbison_a-lock.o lib/libbison_a-gl_map.o lib/libbison_a-math.o lib/libbison_a-mbchar.o lib/libbison_a-mbfile.o lib/libbison_a-mbswidth.o lib/libbison_a-gl_oset.o lib/libbison_a-pipe2.o lib/libbison_a-pipe2-safer.o lib/libbison_a-printf-frexp.o lib/libbison_a-printf-frexpl.o lib/libbison_a-progname.o lib/libbison_a-quotearg.o lib/libbison_a-gl_rbtree_oset.o lib/libbison_a-gl_rbtreehash_list.o lib/malloc/libbison_a-scratch_buffer_dupfree.o lib/malloc/libbison_a-scratch_buffer_grow.o lib/malloc/libbison_a-scratch_buffer_grow_preserve.o lib/malloc/libbison_a-scratch_buffer_set_array_size.o lib/libbison_a-setlocale_null.o lib/libbison_a-sig-handler.o lib/libbison_a-spawn-pipe.o lib/glthread/libbison_a-threadlib.o lib/libbison_a-timespec.o lib/libbison_a-timevar.o lib/glthread/libbison_a-tls.o lib/libbison_a-unicodeio.o lib/libbison_a-unistd.o lib/libbison_a-dup-safer.o lib/libbison_a-fd-safer.o lib/libbison_a-pipe-safer.o lib/unistr/libbison_a-u8-mbtoucr.o lib/unistr/libbison_a-u8-uctomb.o lib/unistr/libbison_a-u8-uctomb-aux.o lib/uniwidth/libbison_a-width.o lib/libbison_a-wait-process.o lib/libbison_a-wctype-h.o lib/libbison_a-xmalloc.o lib/libbison_a-xalloc-die.o lib/libbison_a-xconcat-filename.o lib/libbison_a-xhash.o lib/libbison_a-gl_xlist.o lib/libbison_a-gl_xmap.o lib/libbison_a-xmemdup0.o lib/libbison_a-xreadlink.o lib/libbison_a-xsize.o lib/libbison_a-xstrndup.o lib/libbison_a-get-errno.o lib/libbison_a-path-join.o lib/libbison_a-asnprintf.o lib/libbison_a-asprintf.o lib/libbison_a-fcntl.o lib/libbison_a-fprintf.o lib/libbison_a-fseterr.o lib/libbison_a-mbrtowc.o lib/libbison_a-obstack.o lib/libbison_a-printf.o lib/libbison_a-printf-args.o lib/libbison_a-printf-parse.o lib/libbison_a-readline.o lib/libbison_a-snprintf.o lib/libbison_a-spawn_faction_addchdir.o lib/libbison_a-sprintf.o lib/libbison_a-vasnprintf.o lib/libbison_a-vasprintf.o 
lib/libbison_a-vfprintf.o lib/libbison_a-vsnprintf.o lib/libbison_a-vsprintf.o ranlib lib/libbison.a gcc -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o src/bison src/bison-AnnotationList.o src/bison-InadequacyList.o src/bison-Sbitset.o src/bison-assoc.o src/bison-closure.o src/bison-complain.o src/bison-conflicts.o src/bison-counterexample.o src/bison-derivation.o src/bison-derives.o src/bison-files.o src/bison-fixits.o src/bison-getargs.o src/bison-glyphs.o src/bison-gram.o src/bison-graphviz.o src/bison-ielr.o src/bison-lalr.o src/bison-location.o src/bison-lr0.o src/bison-lssi.o src/bison-main.o src/bison-muscle-tab.o src/bison-named-ref.o src/bison-nullable.o src/bison-output.o src/bison-parse-gram.o src/bison-parse-simulation.o src/bison-print-graph.o src/bison-print-xml.o src/bison-print.o src/bison-reader.o src/bison-reduce.o src/bison-relation.o src/bison-scan-code-c.o src/bison-scan-gram-c.o src/bison-scan-skel-c.o src/bison-state.o src/bison-state-item.o src/bison-strversion.o src/bison-symlist.o src/bison-symtab.o src/bison-tables.o src/bison-uniqstr.o lib/libbison.a -/bin/mkdir -p doc +/usr/bin/mkdir -p doc LC_ALL=C tests/bison --version >doc/bison.help.tmp LC_ALL=C tests/bison --help | \ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ -e '/translation bugs/d' >>doc/bison.help.tmp ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help -if /bin/bash '/build/bison-3.8.2+dfsg/build-aux/missing' help2man --version >/dev/null 2>&1; then \ - /bin/bash '/build/bison-3.8.2+dfsg/build-aux/missing' help2man \ +if /bin/sh '/build/bison-3.8.2+dfsg/build-aux/missing' help2man --version >/dev/null 2>&1; then \ + /bin/sh '/build/bison-3.8.2+dfsg/build-aux/missing' help2man \ --include=./doc/bison.x \ --output=doc/bison.1.tmp tests/bison && \ { sed 's/^\(\.TH[^"]*"[^"]*"[^"]*\)"[^"]*"/\1/' doc/bison.1 >doc/bison.1a.tmp || true; } && \ @@ -2728,7 +2767,7 @@ make[2]: Leaving directory '/build/bison-3.8.2+dfsg' make[1]: Leaving directory '/build/bison-3.8.2+dfsg' dh_auto_test - make -j16 check "TESTSUITEFLAGS=-j16 --verbose" VERBOSE=1 + make -j15 check "TESTSUITEFLAGS=-j15 --verbose" VERBOSE=1 make[1]: Entering directory '/build/bison-3.8.2+dfsg' if test -d ./.git \ && git --version >/dev/null 2>&1; then \ @@ -2757,7 +2796,7 @@ make[3]: Leaving directory '/build/bison-3.8.2+dfsg/gnulib-po' Making check in . 
make[3]: Entering directory '/build/bison-3.8.2+dfsg' -/bin/mkdir -p doc +/usr/bin/mkdir -p doc LC_ALL=C tests/bison --version >doc/bison.help.tmp LC_ALL=C tests/bison --help | \ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ @@ -2765,82 +2804,82 @@ ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help make examples/c/calc/calc examples/c/glr/c++-types examples/c/lexcalc/lexcalc examples/c/mfcalc/mfcalc examples/c/pushcalc/calc examples/c/reccalc/reccalc examples/c/rpcalc/rpcalc examples/c++/calc++/calc++ examples/c++/glr/c++-types examples/c++/simple examples/c++/variant examples/c++/variant-11 ./tests/bison tests/atconfig tests/atlocal make[4]: Entering directory '/build/bison-3.8.2+dfsg' -/bin/bash ./build-aux/ylwrap examples/c/calc/calc.y y.tab.c examples/c/calc/calc.c y.tab.h `echo examples/c/calc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/calc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/glr/c++-types.y y.tab.c examples/c/glr/c++-types.c y.tab.h `echo examples/c/glr/c++-types.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/glr/c++-types.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/lexcalc/parse.y y.tab.c examples/c/lexcalc/parse.c y.tab.h `echo examples/c/lexcalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/lexcalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/calc/calc.y y.tab.c examples/c/calc/calc.c y.tab.h `echo examples/c/calc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/calc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/glr/c++-types.y y.tab.c examples/c/glr/c++-types.c y.tab.h `echo examples/c/glr/c++-types.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/glr/c++-types.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/lexcalc/parse.y y.tab.c examples/c/lexcalc/parse.c y.tab.h `echo examples/c/lexcalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/lexcalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c/lexcalc/scan.l' || echo './'`examples/c/lexcalc/scan.l lex.yy.c examples/c/lexcalc/scan.c -- flex -/bin/bash ./build-aux/ylwrap examples/c/mfcalc/mfcalc.y y.tab.c examples/c/mfcalc/mfcalc.c y.tab.h `echo examples/c/mfcalc/mfcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/mfcalc/mfcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/pushcalc/calc.y y.tab.c examples/c/pushcalc/calc.c y.tab.h `echo examples/c/pushcalc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/pushcalc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all 
--no-lines -/bin/bash ./build-aux/ylwrap examples/c/reccalc/parse.y y.tab.c examples/c/reccalc/parse.c y.tab.h `echo examples/c/reccalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/reccalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -/bin/bash ./build-aux/ylwrap examples/c/rpcalc/rpcalc.y y.tab.c examples/c/rpcalc/rpcalc.c y.tab.h `echo examples/c/rpcalc/rpcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/rpcalc/rpcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap `test -f 'examples/c/lexcalc/scan.l' || echo './'`examples/c/lexcalc/scan.l lex.yy.c examples/c/lexcalc/scan.c -- flex +/bin/sh ./build-aux/ylwrap examples/c/mfcalc/mfcalc.y y.tab.c examples/c/mfcalc/mfcalc.c y.tab.h `echo examples/c/mfcalc/mfcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/mfcalc/mfcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/pushcalc/calc.y y.tab.c examples/c/pushcalc/calc.c y.tab.h `echo examples/c/pushcalc/calc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/pushcalc/calc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/reccalc/parse.y y.tab.c examples/c/reccalc/parse.c y.tab.h `echo examples/c/reccalc/parse.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/reccalc/parse.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap examples/c/rpcalc/rpcalc.y y.tab.c examples/c/rpcalc/rpcalc.c y.tab.h `echo examples/c/rpcalc/rpcalc.c | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c/rpcalc/rpcalc.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines rm -f examples/c++/calc++/parser.stamp \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/calc++/scanner.ll' || echo './'`examples/c++/calc++/scanner.ll lex.yy.c examples/c++/calc++/scanner.cc -- flex -rm -f examples/c++/glr/c++-types.stamp +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/calc++/scanner.ll' || echo './'`examples/c++/calc++/scanner.ll lex.yy.c examples/c++/calc++/scanner.cc -- flex touch examples/c++/calc++/parser.stamp.tmp -touch examples/c++/glr/c++-types.stamp.tmp -\ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/simple.yy' || echo './'`examples/c++/simple.yy y.tab.c examples/c++/simple.cc y.tab.h `echo examples/c++/simple.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/simple.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +rm -f examples/c++/glr/c++-types.stamp ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -o examples/c++/calc++/parser.cc examples/c++/calc++/parser.yy +touch examples/c++/glr/c++-types.stamp.tmp ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines -o examples/c++/glr/c++-types.cc examples/c++/glr/c++-types.yy \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/variant.yy' || echo './'`examples/c++/variant.yy 
y.tab.c examples/c++/variant.cc y.tab.h `echo examples/c++/variant.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/simple.yy' || echo './'`examples/c++/simple.yy y.tab.c examples/c++/simple.cc y.tab.h `echo examples/c++/simple.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/simple.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +\ +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/variant.yy' || echo './'`examples/c++/variant.yy y.tab.c examples/c++/variant.cc y.tab.h `echo examples/c++/variant.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines \ -/bin/bash ./build-aux/ylwrap `test -f 'examples/c++/variant-11.yy' || echo './'`examples/c++/variant-11.yy y.tab.c examples/c++/variant-11.cc y.tab.h `echo examples/c++/variant-11.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant-11.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines +/bin/sh ./build-aux/ylwrap `test -f 'examples/c++/variant-11.yy' || echo './'`examples/c++/variant-11.yy y.tab.c examples/c++/variant-11.cc y.tab.h `echo examples/c++/variant-11.cc | sed -e s/cc$/hh/ -e s/cpp$/hpp/ -e s/cxx$/hxx/ -e s/c++$/h++/ -e s/c$/h/` y.output examples/c++/variant-11.output -- ./tests/bison -o y.tab.c --defines -Werror -Wall,dangling-alias --report=all --no-lines make[4]: 'tests/bison' is up to date. make[4]: Nothing to be done for 'tests/atconfig'. make[4]: 'tests/atlocal' is up to date. -updating examples/c/calc/calc.output mv -f examples/c++/calc++/parser.stamp.tmp examples/c++/calc++/parser.stamp -updating examples/c/glr/c++-types.output +updating examples/c/calc/calc.output g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-driver.o `test -f 'examples/c++/calc++/driver.cc' || echo './'`examples/c++/calc++/driver.cc g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-scanner.o `test -f 'examples/c++/calc++/scanner.cc' || echo './'`examples/c++/calc++/scanner.cc -updating examples/c/reccalc/parse.output g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-calc++.o `test -f 'examples/c++/calc++/calc++.cc' || echo './'`examples/c++/calc++/calc++.cc g++ -DEXEEXT=\"\" -I./examples/c++/calc++ -I./examples/c++/calc++ -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/calc++/calc__-parser.o `test -f 'examples/c++/calc++/parser.cc' || echo './'`examples/c++/calc++/parser.cc -updating examples/c/rpcalc/rpcalc.output -updating examples/c/pushcalc/calc.output updating examples/c/lexcalc/parse.output updating examples/c/calc/calc.h -updating examples/c/glr/c++-types.h -updating examples/c/pushcalc/calc.h gcc -DEXEEXT=\"\" -I./examples/c/calc -I./examples/c/calc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/calc/examples_c_calc_calc-calc.o `test -f 'examples/c/calc/calc.c' || echo './'`examples/c/calc/calc.c -gcc -DEXEEXT=\"\" -I./examples/c/glr -I./examples/c/glr -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/glr/examples_c_glr_c___types-c++-types.o `test -f 'examples/c/glr/c++-types.c' || echo './'`examples/c/glr/c++-types.c -gcc -DEXEEXT=\"\" -I./examples/c/pushcalc -I./examples/c/pushcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/pushcalc/examples_c_pushcalc_calc-calc.o `test -f 'examples/c/pushcalc/calc.c' || echo './'`examples/c/pushcalc/calc.c +updating examples/c/glr/c++-types.output mv -f examples/c++/glr/c++-types.stamp.tmp examples/c++/glr/c++-types.stamp -updating examples/c/rpcalc/rpcalc.h -gcc -DEXEEXT=\"\" -I./examples/c/rpcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o `test -f 'examples/c/rpcalc/rpcalc.c' || echo './'`examples/c/rpcalc/rpcalc.c updating examples/c/lexcalc/parse.h -updating examples/c/mfcalc/mfcalc.output +updating examples/c/pushcalc/calc.output +updating examples/c/rpcalc/rpcalc.output gcc -DEXEEXT=\"\" -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o `test -f 'examples/c/lexcalc/parse.c' || echo './'`examples/c/lexcalc/parse.c +updating examples/c/glr/c++-types.h gcc -DEXEEXT=\"\" -I./examples/c/lexcalc -I./examples/c/lexcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o `test -f 'examples/c/lexcalc/scan.c' || echo './'`examples/c/lexcalc/scan.c -updating examples/c++/simple.output g++ -DEXEEXT=\"\" -I./examples/c++/glr -I./examples/c++/glr -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++14 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/glr/examples_c___glr_c___types-c++-types.o `test -f 'examples/c++/glr/c++-types.cc' || echo './'`examples/c++/glr/c++-types.cc +updating examples/c/mfcalc/mfcalc.output +updating examples/c/reccalc/parse.output +updating examples/c/pushcalc/calc.h +updating examples/c/rpcalc/rpcalc.h +gcc -DEXEEXT=\"\" -I./examples/c/glr -I./examples/c/glr -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/glr/examples_c_glr_c___types-c++-types.o `test -f 'examples/c/glr/c++-types.c' || echo './'`examples/c/glr/c++-types.c +updating examples/c++/simple.output +gcc -DEXEEXT=\"\" -I./examples/c/pushcalc -I./examples/c/pushcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/pushcalc/examples_c_pushcalc_calc-calc.o `test -f 'examples/c/pushcalc/calc.c' || echo './'`examples/c/pushcalc/calc.c updating examples/c/reccalc/parse.h -gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-parse.o `test -f 'examples/c/reccalc/parse.c' || echo './'`examples/c/reccalc/parse.c updating examples/c/mfcalc/mfcalc.h +gcc -DEXEEXT=\"\" -I./examples/c/rpcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o `test -f 'examples/c/rpcalc/rpcalc.c' || echo './'`examples/c/rpcalc/rpcalc.c +gcc -DEXEEXT=\"\" -I./examples/c/mfcalc -I./examples/c/mfcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o `test -f 'examples/c/mfcalc/mfcalc.c' || echo './'`examples/c/mfcalc/mfcalc.c +updating examples/c++/simple.hh updating examples/c++/variant.output -gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o `test -f 'examples/c/reccalc/scan.c' || echo './'`examples/c/reccalc/scan.c +gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-parse.o `test -f 'examples/c/reccalc/parse.c' || echo './'`examples/c/reccalc/parse.c updating examples/c++/variant-11.output -updating examples/c++/simple.hh -gcc -DEXEEXT=\"\" -I./examples/c/mfcalc -I./examples/c/mfcalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o `test -f 'examples/c/mfcalc/mfcalc.c' || echo './'`examples/c/mfcalc/mfcalc.c updating examples/c++/variant.hh +gcc -DEXEEXT=\"\" -I./examples/c/reccalc -I./examples/c/reccalc -Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o `test -f 'examples/c/reccalc/scan.c' || echo './'`examples/c/reccalc/scan.c updating examples/c++/variant-11.hh g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/simple-simple.o `test -f 'examples/c++/simple.cc' || echo './'`examples/c++/simple.cc g++ -DEXEEXT=\"\" -I. 
-Wdate-time -D_FORTIFY_SOURCE=2 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/variant-variant.o `test -f 'examples/c++/variant.cc' || echo './'`examples/c++/variant.cc g++ -DEXEEXT=\"\" -I. -Wdate-time -D_FORTIFY_SOURCE=2 -std=c++11 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -c -o examples/c++/variant_11-variant-11.o `test -f 'examples/c++/variant-11.cc' || echo './'`examples/c++/variant-11.cc gcc -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/calc/calc examples/c/calc/examples_c_calc_calc-calc.o -gcc -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/rpcalc/rpcalc examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o -lm -gcc -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/lexcalc/lexcalc examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o gcc -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/pushcalc/calc examples/c/pushcalc/examples_c_pushcalc_calc-calc.o -gcc -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/reccalc/reccalc examples/c/reccalc/examples_c_reccalc_reccalc-parse.o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o +gcc -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/rpcalc/rpcalc examples/c/rpcalc/examples_c_rpcalc_rpcalc-rpcalc.o -lm gcc -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/mfcalc/mfcalc examples/c/mfcalc/examples_c_mfcalc_mfcalc-mfcalc.o -lm +gcc -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/reccalc/reccalc examples/c/reccalc/examples_c_reccalc_reccalc-parse.o examples/c/reccalc/examples_c_reccalc_reccalc-scan.o +gcc -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/lexcalc/lexcalc examples/c/lexcalc/examples_c_lexcalc_lexcalc-parse.o examples/c/lexcalc/examples_c_lexcalc_lexcalc-scan.o gcc -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c/glr/c++-types examples/c/glr/examples_c_glr_c___types-c++-types.o g++ -std=c++11 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/simple examples/c++/simple-simple.o g++ -std=c++11 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/variant-11 examples/c++/variant_11-variant-11.o -g++ -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/variant examples/c++/variant-variant.o g++ -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/calc++/calc++ examples/c++/calc++/calc__-driver.o examples/c++/calc++/calc__-scanner.o examples/c++/calc++/calc__-calc++.o examples/c++/calc++/calc__-parser.o +g++ -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/variant examples/c++/variant-variant.o g++ -std=c++14 -g -O2 -ffile-prefix-map=/build/bison-3.8.2+dfsg=. -fstack-protector-strong -Wformat -Werror=format-security -Wl,-z,relro -Wl,-z,now -o examples/c++/glr/c++-types examples/c++/glr/examples_c___glr_c___types-c++-types.o make[4]: Leaving directory '/build/bison-3.8.2+dfsg' make check-TESTS check-local @@ -2857,7 +2896,7 @@ } >tests/package.m4.tmp mv tests/package.m4.tmp tests/package.m4 \ - /bin/bash '/build/bison-3.8.2+dfsg/build-aux/missing' autom4te --language=autotest -I ./tests ./tests/testsuite.at -o tests/testsuite.tmp + /bin/sh '/build/bison-3.8.2+dfsg/build-aux/missing' autom4te --language=autotest -I ./tests ./tests/testsuite.at -o tests/testsuite.tmp make[5]: Entering directory '/build/bison-3.8.2+dfsg' make[5]: Entering directory '/build/bison-3.8.2+dfsg' Making all in po @@ -2877,22 +2916,22 @@ PASS: examples/c/glr/c++-types.test PASS: examples/c++/variant.test PASS: examples/c++/simple.test +/usr/bin/mkdir -p doc +LC_ALL=C tests/bison --version >doc/bison.help.tmp PASS: examples/c++/variant-11.test PASS: examples/c++/glr/c++-types.test -/bin/mkdir -p doc -PASS: examples/c/mfcalc/mfcalc.test -PASS: examples/c/pushcalc/calc.test -LC_ALL=C tests/bison --version >doc/bison.help.tmp -PASS: examples/c/lexcalc/lexcalc.test -PASS: examples/c/rpcalc/rpcalc.test LC_ALL=C tests/bison --help | \ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ -e '/translation bugs/d' >>doc/bison.help.tmp -PASS: examples/c/calc/calc.test -PASS: examples/c/reccalc/reccalc.test ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help make[6]: Leaving directory '/build/bison-3.8.2+dfsg' make[5]: Leaving directory '/build/bison-3.8.2+dfsg' +PASS: examples/c/mfcalc/mfcalc.test +PASS: examples/c/calc/calc.test +PASS: examples/c/pushcalc/calc.test +PASS: examples/c/rpcalc/rpcalc.test +PASS: examples/c/lexcalc/lexcalc.test +PASS: examples/c/reccalc/reccalc.test PASS: examples/c++/calc++/calc++.test ============================================================================ Testsuite summary for GNU Bison 3.8.2 @@ -2908,7 +2947,7 @@ make[5]: Leaving directory '/build/bison-3.8.2+dfsg' "/usr/bin/perl" -pi -e 's/\@tb\@/\t/g' tests/testsuite.tmp mv tests/testsuite.tmp tests/testsuite -/bin/bash ./tests/testsuite -C tests -j16 --verbose +/bin/sh ./tests/testsuite -C tests -j15 --verbose ## --------------------------- ## ## GNU Bison 3.8.2 test suite. ## ## --------------------------- ## @@ -2927,45 +2966,41 @@ +1. m4.at:21: testing Generating Comments ... +2. input.at:27: testing Invalid number of arguments ... +./input.at:29: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 4. input.at:83: testing Invalid inputs ... - +5. input.at:147: testing Invalid inputs with {} ... 7. 
input.at:204: testing Yacc warnings ... +./input.at:162: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./m4.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -S ./input.m4 input.y ./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y +15. input.at:774: testing Unused values ... 6. input.at:173: testing Yacc warnings on symbols ... -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y -2. input.at:27: testing Invalid number of arguments ... -./input.at:29: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret +3. input.at:58: testing Invalid options ... ./input.at:97: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' input.y || exit 77 -5. input.at:147: testing Invalid inputs with {} ... -1. m4.at:21: testing Generating Comments ... +9. input.at:287: testing Invalid symbol declarations ... 13. input.at:528: testing Invalid $n and @n ... ./input.at:536: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -15. input.at:774: testing Unused values ... +11. input.at:401: testing Dangling aliases ... +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wdangling input.y +./input.at:67: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -ferror=caret input.y +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y +12. input.at:427: testing Symbol declarations ... 14. input.at:552: testing Type Clashes ... ./input.at:565: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:162: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y 2.y ./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y 10. input.at:341: testing Redefining the error token ... 
./input.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y 2.y -11. input.at:401: testing Dangling aliases ... -./input.at:99: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -12. input.at:427: testing Symbol declarations ... -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wdangling input.y -3. input.at:58: testing Invalid options ... -16. input.at:784: testing Unused values before symbol declarations ... -./m4.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -S ./input.m4 input.y -./input.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S./dump-symbols.m4 input.y -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:67: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -ferror=caret input.y 8. input.at:238: testing Yacc's %type ... ./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wyacc input.y -9. input.at:287: testing Invalid symbol declarations ... ./input.at:304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -14. input.at:552: ok -./input.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --skeleton -stderr: +./input.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S./dump-symbols.m4 input.y +./input.at:99: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +5. input.at:147: ok stderr: +13. input.at:528: ok bison: invalid argument 'error=caret' for '--feature' Valid arguments are: - 'none' @@ -2974,6 +3009,12 @@ - 'syntax-only' - 'all' ./input.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=error=itemsets input.y +14. 
input.at:552: ok +./m4.at:55: cat output.txt +stderr: +./input.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --skeleton +9. input.at:287: ok +stderr: input.y:1.11: error: invalid null character 1 | %header "ð€ˆ" | ^ @@ -3007,17 +3048,7 @@ input.y:10.1-11.0: error: missing '%}' at end of file 10 | %{ | ^~ -13. input.at:528: ok ./input.at:104: "$PERL" -p -e 's{([\0\200\210\360\377])}{sprintf "\\x%02x", ord($1)}ge' stderr -./input.at:390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -bison: option '--skeleton' requires an argument -Try 'bison --help' for more information. -stderr: -./input.at:43: sed -e \ - "s/requires an argument -- skeleton/'--skeleton' requires an argument/" \ - stderr -9. input.at:287: ok bison: invalid argument 'error=itemsets' for '--report' Valid arguments are: - 'none' @@ -3028,12 +3059,11 @@ - 'counterexamples', 'cex' - 'all' ./input.at:72: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror?all input.y -5. input.at:147: ok -./m4.at:55: cat output.txt +./input.at:390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + stderr: -2. input.at:27: ok -4. input.at:83: ok +stderr: bison: invalid argument 'error?all' for '--warning' Valid arguments are: - 'all' @@ -3049,56 +3079,55 @@ - 'other' - 'precedence' - 'yacc' - -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror +bison: option '--skeleton' requires an argument +Try 'bison --help' for more information. 3. input.at:58: ok -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror - - +./input.at:43: sed -e \ + "s/requires an argument -- skeleton/'--skeleton' requires an argument/" \ + stderr 1. m4.at:21: ok -18. input.at:832: testing EOF redeclared ... +4. input.at:83: ok + ./input.at:468: cat symbols.csv -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +16. input.at:784: testing Unused values before symbol declarations ... +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +2. input.at:27: ok -20. input.at:899: testing Default %printer and %destructor redeclared ... -./input.at:959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Werror 17. 
input.at:794: testing Symbol redeclared ... +12. input.at:427: ok + ./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +18. input.at:832: testing EOF redeclared ... +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Werror +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror + +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror +./input.at:390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +20. input.at:899: testing Default %printer and %destructor redeclared ... 19. input.at:859: testing Symbol class redefinition ... +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror + +./input.at:959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:871: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y 22. input.at:1013: testing Undefined symbols ... ./input.at:1023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -12. input.at:427: ok -./input.at:871: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -stderr: -input.y:1.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc] - 1 | %nterm exp - | ^~~~~~ -input.y:2.12-15: error: POSIX Yacc does not support hexadecimal literals [-Werror=yacc] - 2 | %token NUM 0x40 "number" - | ^~~~ -input.y:2.17-24: error: POSIX Yacc does not support string literals [-Werror=yacc] - 2 | %token NUM 0x40 "number" - | ^~~~~~~~ -input.y:4.6-13: error: POSIX Yacc does not support string literals [-Werror=yacc] - 4 | exp: "number"; - | ^~~~~~~~ + 21. input.at:970: testing Per-type %printer and %destructor redeclared ... 
./input.at:987: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y - - -20. input.at:899: ok -./input.at:390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./input.at:182: sed 's,.*/$,,' stderr 1>&2 +23. input.at:1045: testing Unassociated types used for a printer or destructor ... +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y 19. input.at:859: ok -21. input.at:970: ok -22. input.at:1013: ok +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror 24. input.at:1074: testing Useless printers or destructors ... -stderr: -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Werror ./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +22. input.at:1013: ok +20. input.at:899: ok +21. input.at:970: ok +stderr: +stderr: input.y:1.1-11: error: POSIX Yacc does not support %destructor [-Werror=yacc] 1 | %destructor {} | ^~~~~~~~~~~ @@ -3111,38 +3140,37 @@ input.y:7.4-9: error: POSIX Yacc does not support %empty [-Werror=yacc] 7 | b: %empty { $$ = 42; }; | ^~~~~~ -23. input.at:1045: testing Unassociated types used for a printer or destructor ... -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror - -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error - -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror - - -./input.at:216: sed 's,.*/$,,' stderr 1>&2 -25. input.at:1139: testing Unused values with default %destructor ... 
-./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -stderr: - input.y:2.13-17: error: string literal "bar" not attached to a symbol [-Werror=dangling-alias] 2 | %type "bar" | ^~~~~ input.y:4.19-23: error: string literal "baz" not attached to a symbol [-Werror=dangling-alias] 4 | expr: "foo" "bar" "baz" | ^~~~~ + + +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +stderr: +input.y:1.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc] + 1 | %nterm exp + | ^~~~~~ +input.y:2.12-15: error: POSIX Yacc does not support hexadecimal literals [-Werror=yacc] + 2 | %token NUM 0x40 "number" + | ^~~~ +input.y:2.17-24: error: POSIX Yacc does not support string literals [-Werror=yacc] + 2 | %token NUM 0x40 "number" + | ^~~~~~~~ +input.y:4.6-13: error: POSIX Yacc does not support string literals [-Werror=yacc] + 4 | exp: "number"; + | ^~~~~~~~ +./input.at:410: sed 's,.*/$,,' stderr 1>&2 + +./input.at:216: sed 's,.*/$,,' stderr 1>&2 26. input.at:1187: testing Unused values with per-type %destructor ... ./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -28. input.at:1247: testing Token collisions ... -./input.at:1256: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -29. input.at:1275: testing Incompatible Aliases ... -./input.at:1285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -stderr: -27. input.at:1219: testing Duplicate string ... -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y + stderr: -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error +25. input.at:1139: testing Unused values with default %destructor ... +./input.at:182: sed 's,.*/$,,' stderr 1>&2 input.y:2.1-6: error: POSIX Yacc does not support %nterm [-Werror=yacc] 2 | %nterm nterm1 | ^~~~~~ @@ -3167,18 +3195,26 @@ input.y:10.9-16: error: POSIX Yacc does not support string literals [-Werror=yacc] 10 | nterm3: "TOKEN3" | ^~~~~~~~ +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +stderr: +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=error +27. input.at:1219: testing Duplicate string ... 
./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y input.y:1.16-18: error: symbol FOO redeclared [-Werror=other] 1 | %token FOO BAR FOO 0 | ^~~ input.y:1.8-10: note: previous declaration 1 | %token FOO BAR FOO 0 | ^~~ -./input.at:410: sed 's,.*/$,,' stderr 1>&2 -28. input.at:1247: ok -./input.at:253: sed 's,.*/$,,' stderr 1>&2 -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror stderr: +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:253: sed 's,.*/$,,' stderr 1>&2 +28. input.at:1247: testing Token collisions ... +./input.at:1256: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:843: sed 's,.*/$,,' stderr 1>&2 +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error input.y:1.12-14: error: symbol FOO redeclared [-Werror=other] 1 | %token FOO FOO | ^~~ @@ -3197,29 +3233,26 @@ input.y:3.8-10: note: previous declaration 3 | %token EOF 0 EOF 0 | ^~~ -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:1299: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:843: sed 's,.*/$,,' stderr 1>&2 +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error +28. 
input.at:1247: ok ./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error ./input.at:804: sed 's,.*/$,,' stderr 1>&2 -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=error +stderr: ./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=error -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:1313: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y - +input.y:4.22-28: error: type is used, but is not associated to any symbol [-Werror=other] +input.y:5.25-31: error: type is used, but is not associated to any symbol [-Werror=other] +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror ./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error ./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Werror -stderr: -stdout: -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none -./input.at:391: $PREPARSER ./input -30. input.at:1400: testing Torturing the Scanner ... 
-./input.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -stderr: -./input.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y -./input.at:391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:1062: sed 's,.*/$,,' stderr 1>&2 +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Wnone,none -Werror --trace=none +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +29. input.at:1275: testing Incompatible Aliases ... +./input.at:1285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y stderr: input.y:6.8-22: error: unset value: $$ [-Werror=other] 6 | start: end end { $1; } ; @@ -3230,15 +3263,32 @@ input.y:7.6-8: error: unset value: $$ [-Werror=other] 7 | end: { } ; | ^~~ +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none stderr: -./input.at:1327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -stderr: +./input.at:1299: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y input.y:16.13-19: error: useless %printer for type [-Werror=other] input.y:17.16-22: error: useless %destructor for type [-Werror=other] +./input.at:1199: sed 's,.*/$,,' stderr 1>&2 +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none stderr: -input.y:4.22-28: error: type is used, but is not associated to any symbol [-Werror=other] -input.y:5.25-31: error: type is used, but is not associated to any symbol [-Werror=other] -./input.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; 
VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none +./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +./input.at:1085: sed 's,.*/$,,' stderr 1>&2 +stdout: +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +stderr: +./input.at:391: $PREPARSER ./input +input.y:6.11-14: error: symbol "<=" used more than once as a literal string [-Werror=other] +stderr: +./input.at:1313: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=none -Werror --trace=none +./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +stderr: +./input.at:1236: sed 's,.*/$,,' stderr 1>&2 +stderr: +10. input.at:341: ok input.y:6.8-45: error: unset value: $$ [-Werror=other] 6 | start: end end tagged tagged { $1; $3; } ; | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -3248,35 +3298,7 @@ input.y:7.6-8: error: unset value: $$ [-Werror=other] 7 | end: { } ; | ^~~ -10. 
input.at:341: ok -./input.at:1085: sed 's,.*/$,,' stderr 1>&2 -./input.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none -stderr: -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:1152: sed 's,.*/$,,' stderr 1>&2 -./input.at:1062: sed 's,.*/$,,' stderr 1>&2 ./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:1359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y -Wnone,none -Werror --trace=none -./input.at:1199: sed 's,.*/$,,' stderr 1>&2 -input.y:6.11-14: error: symbol "<=" used more than once as a literal string [-Werror=other] -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error - -./input.at:1236: sed 's,.*/$,,' stderr 1>&2 -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none -31. input.at:1569: testing Typed symbol aliases ... 
-./input.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./input.at:1555: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=error -./input.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wdangling input.y --warnings=none -Werror --trace=none -./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -29. input.at:1275: ok -stderr: -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y -Wnone,none -Werror --trace=none input.y:12.10-32: error: unset value: $$ [-Werror=other] 12 | a: INT | INT { } INT { } INT { }; | ^~~~~~~~~~~~~~~~~~~~~~~ @@ -3382,27 +3404,34 @@ input.y:26.40-42: error: unset value: $$ [-Werror=other] 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; | ^~~ -6. 
input.at:173: ok -./input.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=error +./input.at:1327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y ./input.at:785: sed 's,.*/$,,' stderr 1>&2 +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1152: sed 's,.*/$,,' stderr 1>&2 -./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -31. input.at:1569: ok +./input.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y ./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -7. input.at:204: ok +11. input.at:401: ok +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:1359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:216: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none +./input.at:804: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +30. 
input.at:1400: testing Torturing the Scanner ... +./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +6. input.at:173: ok +./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none ./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y -Wnone,none -Werror --trace=none -32. input.at:1609: testing Require 1.0 ... -17. input.at:794: ok -33. input.at:1610: testing Require 3.8.2 ... -./input.at:1609: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -stderr: +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y 18. input.at:832: ok -11. input.at:401: ok -./input.at:1062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +31. input.at:1569: testing Typed symbol aliases ... +./input.at:1586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: input.y:12.10-32: error: unset value: $$ [-Werror=other] 12 | a: INT | INT { } INT { } INT { }; | ^~~~~~~~~~~~~~~~~~~~~~~ @@ -3508,97 +3537,99 @@ input.y:26.40-42: error: unset value: $$ [-Werror=other] 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; | ^~~ -./input.at:1610: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - +7. input.at:204: ok +29. input.at:1275: ok +17. 
input.at:794: ok ./input.at:775: sed 's,.*/$,,' stderr 1>&2 -./input.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wyacc input.y --warnings=none -Werror --trace=none -./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -34. input.at:1612: testing Require 100.0 ... -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:1612: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y ./input.at:1199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +23. input.at:1045: ok +32. input.at:1609: testing Require 1.0 ... -stderr: -input.y:9.10-16: error: require bison 100.0, but have 3.8.2 +./input.at:1609: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + + +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:1555: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c + +31. input.at:1569: ok + +33. input.at:1610: testing Require 3.8.2 ... +./input.at:1610: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +34. input.at:1612: testing Require 100.0 ... +8. input.at:238: ok 35. input.at:1619: testing String aliases for character tokens ... ./input.at:1632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./input.at:1236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -o input.c input.y --warnings=none -Werror --trace=none -34. 
input.at:1612: ok -stderr: +./input.at:1612: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none 36. input.at:1642: testing Symbols ... +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc input.y +stderr: +input.y:9.10-16: error: require bison 100.0, but have 3.8.2 +34. input.at:1612: ok -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -32. input.at:1609: ok 37. input.at:1708: testing Numbered tokens ... +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none ./input.at:1720: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret redecl.y -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc input.y stderr: +stderr: +26. input.at:1187: ok + 33. input.at:1610: ok -8. input.at:238: ok +32. input.at:1609: ok 38. input.at:1750: testing Unclosed constructs ... -26. input.at:1187: ok 35. input.at:1619: ok ./input.at:1779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y - - +27. input.at:1219: ok +./input.at:1152: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none 39. input.at:1805: testing %start after first rule ... ./input.at:1817: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +38. input.at:1750: ok -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +40. input.at:1826: testing Duplicate %start symbol ... + +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y ./input.at:1735: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret too-large.y -23. input.at:1045: 40. input.at:1826: testing Duplicate %start symbol ... - ok 41. input.at:1895: testing %prec takes a token ... 
./input.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -38. input.at:1750: ok -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -27. input.at:1219: ok -41. input.at:1895: ok + +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y 42. input.at:1916: testing %prec's token must be defined ... -37. input.at:1708: ok -43. input.at:1936: testing Reject unused %code qualifiers ... -./input.at:1946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c.y ./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -44. input.at:2025: testing Multiple %code ... -./input.at:2054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Werror -./input.at:1085: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none - - -39. input.at:1805: ok +43. input.at:1936: testing Reject unused %code qualifiers ... +39. input.at:1805: ./input.at:1946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c.y + ok +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +37. input.at:1708: ok +41. input.at:1895: ok +44. input.at:2025: testing Multiple %code ... +45. input.at:2065: testing errors ... +./input.at:2077: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-redefined.y +./input.at:2054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +46. input.at:2102: testing %define, --define, --force-define ... 
+./input.at:2118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dvar-dd=cmd-d1 -Dvar-dd=cmd-d2 \ + -Fvar-ff=cmd-f1 -Fvar-ff=cmd-f2 \ + -Dvar-dfg=cmd-d -Fvar-dfg=cmd-f \ + -Fvar-fd=cmd-f -Dvar-fd=cmd-d \ + --skeleton ./skel.c input.y +./input.at:2091: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-unused.y ./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror 47. input.at:2170: testing "%define" Boolean variables ... ./input.at:2180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret Input.y -49. input.at:2224: testing "%define" keyword variables ... -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -stderr: +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:2123: cat input.tab.c 48. input.at:2191: testing "%define" code variables ... -46. input.at:2102: testing %define, --define, --force-define ... ./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy -stdout: -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -45. input.at:2065: testing errors ... -./input.at:2077: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-redefined.y -./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:2091: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-unused.y -50. input.at:2257: testing "%define" enum variables ... 
-./input.at:2269: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:2118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dvar-dd=cmd-d1 -Dvar-dd=cmd-d2 \ - -Fvar-ff=cmd-f1 -Fvar-ff=cmd-f2 \ - -Dvar-dfg=cmd-d -Fvar-dfg=cmd-f \ - -Fvar-fd=cmd-f -Dvar-fd=cmd-d \ - --skeleton ./skel.c input.y -./input.at:1556: $CC $CFLAGS $CPPFLAGS -c -o main.o main.c -./input.at:2054: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: input.y:1.1-5: error: POSIX Yacc does not support %code [-Werror=yacc] input.y:9.8-16: error: POSIX Yacc forbids dashes in symbol names: WITH-DASH [-Werror=yacc] @@ -3608,8 +3639,19 @@ input.y:20.8-16: error: POSIX Yacc forbids dashes in symbol names: with-dash [-Werror=yacc] input.y:22.15-28: error: POSIX Yacc does not support string literals [-Werror=yacc] input.y:24.17-32: error: POSIX Yacc does not support string literals [-Werror=yacc] +49. input.at:2224: testing "%define" keyword variables ... +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:2135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dvar=cmd-d input-dg.y +./input.at:1960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c-glr.y +./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./input.at:2054: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./input.at:1666: sed 's,.*/$,,' stderr 1>&2 +./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror stderr: -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:2146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Dvar=cmd-d input-dg.y +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=error input.y:1.12-14: error: 
duplicate directive [-Werror=other] 1 | %start exp exp exp | ^~~ @@ -3623,193 +3665,195 @@ 1 | %start exp exp exp | ^~~ input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./input.at:2284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1666: sed 's,.*/$,,' stderr 1>&2 -./input.at:2123: cat input.tab.c -./input.at:1960: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c-glr.y -stderr: -./input.at:1836: sed 's,.*/$,,' stderr 1>&2 -input.y:6.23-28: error: unused value: $4 [-Werror=other] -input.y:8.9-11: error: unset value: $$ [-Werror=other] +45. input.at:2065: ok 47. input.at:2170: ok -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=error -./input.at:2303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:2135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dvar=cmd-d input-dg.y -./input.at:1175: sed 's,.*/$,,' stderr 1>&2 +./input.at:1836: sed 's,.*/$,,' stderr 1>&2 +./input.at:2158: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dunused-d -Funused-f input-unused.y +stderr: +stderr: +input.y:3.13-14: error: useless %printer for type <> [-Werror=other] +stdout: + ./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:1556: $CC $CFLAGS $CPPFLAGS -c -o main.o main.c +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +stderr: stderr: +input.y:6.23-28: error: unused value: $4 [-Werror=other] +input.y:8.9-11: error: unset value: $$ [-Werror=other] input.y:2.8-17: error: token for %prec is not defined: PREC [-Werror=other] -45. 
input.at:2065: ok +./input.at:1116: sed 's,.*/$,,' stderr 1>&2 ./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror -stderr: -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:2146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Dvar=cmd-d input-dg.y -./input.at:1925: sed 's,.*/$,,' stderr 1>&2 51. input.at:2320: testing "%define" file variables ... -stdout: +50. input.at:2257: testing "%define" enum variables ... +./input.at:2269: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y ./input.at:2329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1175: sed 's,.*/$,,' stderr 1>&2 +./input.at:1925: sed 's,.*/$,,' stderr 1>&2 +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Wnone,none -Werror --trace=none +./input.at:1973: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++.y +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:2284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +46. 
input.at:2102: ok ./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error - -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +stderr: +stdout: +./input.at:2303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none ./input.at:1557: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.o main.o $LIBS -./input.at:2158: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dunused-d -Funused-f input-unused.y -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none stderr: input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] -52. input.at:2342: testing "%define" backward compatibility ... -./input.at:2355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y -Wnone,none -Werror --trace=none +./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=none -Werror --trace=none +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none + +51. 
input.at:2320: ok ./input.at:2246: sed 's,.*/$,,' stderr 1>&2 -stderr: -./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -input.y:3.13-14: error: useless %printer for type <> [-Werror=other] -stderr: -./input.at:1973: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++.y -52. input.at:2342: ok -stdout: -./input.at:785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:1558: $PREPARSER ./input -50. input.at:2257: ok ./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:1116: sed 's,.*/$,,' stderr 1>&2 +./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +52. input.at:2342: testing "%define" backward compatibility ... +./input.at:2355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y stderr: -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:1558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error stderr: input.yy:2.1-30: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] input.yy:4.1-30: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] input.yy:5.1-30: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] input.yy:3.1-30: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] -51. input.at:2320: ok -30. 
input.at:1400: ok -./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none - +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +stdout: +./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +./input.at:2055: $PREPARSER ./input +52. input.at:2342: ok +stderr: stderr: +./input.at:1986: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++-glr.y +./input.at:2055: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./input.at:2213: sed 's,.*/$,,' stderr 1>&2 +./input.at:1836: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none stdout: -46. input.at:2102: ok +./input.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./input.at:1558: $PREPARSER ./input +./input.at:2213: sed 's,.*/$,,' stderr 1>&2 +stderr: 53. input.at:2393: testing Unused api.pure ... ./input.at:2413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2055: $PREPARSER ./input -./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -stderr: -./input.at:2055: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +44. input.at:2025: ok +./input.at:1558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error ./input.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +50. input.at:2257: ok +./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +30. input.at:1400: ok 54. input.at:2429: testing C++ namespace reference errors ... 
./input.at:2450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:1925: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error -./input.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:1666: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --yacc input.y --warnings=none -Werror --trace=none -44. input.at:2025: ok -56. input.at:2543: testing Bad escapes in literals ... -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y 55. input.at:2482: testing Bad character literals ... + +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1681: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +56. input.at:2543: testing Bad escapes in literals ... +./input.at:2556: "$PERL" -e 'print "start: \"\\\t\\\f\\\0\\\1\" ;";' >> input.y || exit 77 ./input.at:2484: set x `LC_ALL=C ls -l 'empty.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='empty.y'; } || exit 77 -./input.at:2556: "$PERL" -e 'print "start: \"\\\t\\\f\\\0\\\1\" ;";' >> input.y || exit 77 -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y ./input.at:2558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y - -56. input.at:2543: ok +./input.at:2414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y 57. input.at:2582: testing Unexpected end of file ... 
./input.at:2586: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +42. input.at:1916: ok +./input.at:1999: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-@@.y stderr: +./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none 0+0 records in 0+0 records out -0 bytes copied, 6.6296e-05 s, 0.0 kB/s -./input.at:1986: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-c++-glr.y +0 bytes copied, 4.9392e-05 s, 0.0 kB/s stdout: ./input.at:2490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret empty.y -58. input.at:2675: testing LAC: Errors for %define ... -42. input.at:1916: ok +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +25. input.at:1139: ok +56. input.at:2543: ok ./input.at:2591: set x `LC_ALL=C ls -l 'char.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='char.y'; } || exit 77 -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=none input.y -./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none +./input.at:2452: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -25. 
input.at:1139: ok -./input.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./input.at:2508: set x `LC_ALL=C ls -l 'two.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='two.y'; } || exit 77 -./input.at:2452: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2415: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +stderr: -./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none stderr: 0+0 records in 0+0 records out -0 bytes copied, 6.1285e-05 s, 0.0 kB/s +0 bytes copied, 5.9035e-05 s, 0.0 kB/s stdout: -59. input.at:2719: testing -Werror combinations ... -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y -./input.at:2514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret two.y -stderr: +./input.at:2246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none 0+0 records in 0+0 records out -0 bytes copied, 5.6632e-05 s, 0.0 kB/s +0 bytes copied, 7.3142e-05 s, 0.0 kB/s + stdout: -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -./input.at:2594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret char.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=full input.y ./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -60. input.at:2764: testing %name-prefix and api.prefix are incompatible ... 
- -./input.at:2522: -set x `LC_ALL=C ls -l 'three.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='three.y'; } || exit 77 -./input.at:2779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-deprecated input.y +./input.at:2594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret char.y +./input.at:2514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret two.y +58. input.at:2675: testing LAC: Errors for %define ... +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=none input.y +stderr: +59. input.at:2719: testing -Werror combinations ... +input.y:2.16-18: error: useless %printer for type <*> [-Werror=other] +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y +./input.at:2012: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-].y +./input.at:1124: sed 's,.*/$,,' stderr 1>&2 ./input.at:2604: set x `LC_ALL=C ls -l 'escape-in-char.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-char.y'; } || exit 77 -./input.at:1999: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-@@.y -./input.at:1681: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -61. input.at:2793: testing Redefined %union name ... stderr: +./input.at:2522: +set x `LC_ALL=C ls -l 'three.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='three.y'; } || exit 77 +60. input.at:2764: testing %name-prefix and api.prefix are incompatible ... 
+./input.at:2779: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-deprecated input.y +./input.at:2454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +stdout: +./input.at:1694: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error ./input.at:2213: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none +stderr: +stderr: 0+0 records in 0+0 records out -0 bytes copied, 0.000152877 s, 0.0 kB/s +0 bytes copied, 5.7179e-05 s, 0.0 kB/s +./input.at:2416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y stdout: -stderr: 0+0 records in 0+0 records out -0 bytes copied, 5.0487e-05 s, 0.0 kB/s +0 bytes copied, 4.638e-05 s, 0.0 kB/s stdout: -./input.at:2607: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-char.y ./input.at:2528: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret three.y -./input.at:2415: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +36. 
input.at:1642: ok +./input.at:2607: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-char.y +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y stderr: -./input.at:2617: -set x `LC_ALL=C ls -l 'string.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='string.y'; } || exit 77 -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=full input.y ./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=unsupported input.y -./input.at:2454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y input.y:1.16-18: error: duplicate directive [-Werror=other] 1 | %start exp foo exp | ^~~ @@ -3817,148 +3861,83 @@ 1 | %start exp foo exp | ^~~ input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -stderr: -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror +./input.at:2780: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -p bar -Wno-deprecated input.y 55. 
input.at:2482: ok +./input.at:2617: +set x `LC_ALL=C ls -l 'string.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='string.y'; } || exit 77 + +./input.at:1859: sed 's,.*/$,,' stderr 1>&2 +./input.at:2456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +43. input.at:1936: ok +stderr: + 0+0 records in 0+0 records out -0 bytes copied, 5.162e-05 s, 0.0 kB/s +0 bytes copied, 4.6695e-05 s, 0.0 kB/s stdout: +stderr: +61. input.at:2793: testing Redefined %union name ... +input.y:2.15: error: stray '$' [-Werror=other] +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y ./input.at:2620: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret string.y -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -./input.at:1859: sed 's,.*/$,,' stderr 1>&2 +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Syacc.c -Dparse.lac=unsupported input.y -./input.at:2780: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -p bar -Wno-deprecated input.y -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:2727: sed 's,.*/$,,' stderr 1>&2 +./input.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +62. input.at:2840: testing Stray $ or @ ... 
+./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y ./input.at:2630: set x `LC_ALL=C ls -l 'escape-in-string.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-string.y'; } || exit 77 -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -stderr: -input.y:2.15: error: stray '$' [-Werror=other] -./input.at:2012: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret special-char-].y -62. input.at:2840: testing Stray $ or @ ... -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy +./input.at:2458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2781: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -Wno-deprecated input.y +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error +63. input.at:2883: testing Code injection ... 
+./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y stderr: 0+0 records in 0+0 records out -0 bytes copied, 6.0148e-05 s, 0.0 kB/s +0 bytes copied, 5.1481e-05 s, 0.0 kB/s stdout: -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror -./input.at:2727: sed 's,.*/$,,' stderr 1>&2 ./input.at:2633: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-string.y -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.yy -./input.at:2781: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dapi.prefix={foo} -Wno-deprecated input.y -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=none input.y +24. 
input.at:1074: ok +./input.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./input.at:2643: set x `LC_ALL=C ls -l 'tstring.y'` && size=$6 && { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='tstring.y'; } || exit 77 -./input.at:2456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=none input.y -stderr: -stdout: -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror ./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Werror stderr: -input.y:3.8-10: error: %define variable 'api.value.union.name' redefined [-Werror=other] -input.y:1.8-10: note: previous definition -input.y:4.1-32: error: %define variable 'api.value.union.name' redefined [-Werror=other] -input.y:3.8-10: note: previous definition -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:1694: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -stderr: -stderr: -0+0 records in -0+0 records out -0 bytes copied, 5.3349e-05 s, 0.0 kB/s -stdout: -./input.at:2808: sed 's,.*/$,,' stderr 1>&2 -./input.at:2416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2646: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret tstring.y -43. input.at:1936: ok -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -input.y:2.16-18: error: useless %printer for type <*> [-Werror=other] -36. 
input.at:1642: ok -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror -./input.at:2782: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -p bar -Wno-deprecated input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full input.y - -./input.at:2656: -set x `LC_ALL=C ls -l 'escape-in-tstring.y'` && - size=$6 && - { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-tstring.y'; } || exit 77 ./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none -./input.at:1124: sed 's,.*/$,,' stderr 1>&2 -stderr: -stderr: -input.y:11.19: error: stray '$' [-Werror=other] -input.y:11.23: error: stray '@' [-Werror=other] -input.y:12.19: error: stray '$' [-Werror=other] -input.y:12.23: error: stray '@' [-Werror=other] -input.y:13.19: error: stray '$' [-Werror=other] -input.y:13.23: error: stray '@' [-Werror=other] -input.y:16.19: error: stray '$' [-Werror=other] -input.y:16.23: error: stray '@' [-Werror=other] -input.y:17.19: error: stray '$' [-Werror=other] input.y:5.1-40: error: %define variable 'lr.type' requires keyword values [-Werror=deprecated] input.y:3.1-40: error: %define variable 'lr.default-reduction' requires keyword values [-Werror=deprecated] input.y:4.1-40: error: %define variable 'lr.keep-unreachable-state' requires keyword values [-Werror=deprecated] input.y:1.1-38: error: %define variable 'api.pure' requires keyword values [-Werror=deprecated] input.y:2.1-40: error: %define variable 'api.push-pull' requires keyword values [-Werror=deprecated] +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y stderr: -./input.at:2861: sed 's,.*/$,,' stderr 1>&2 0+0 records in 0+0 records out -0 bytes copied, 5.7383e-05 s, 0.0 kB/s +0 bytes copied, 6.5995e-05 s, 0.0 kB/s stdout: - -./input.at:2458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-tstring.y -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error -63. input.at:2883: testing Code injection ... 
-./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./input.at:2247: sed 's,.*/$,,' stderr 1>&2 -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=unsupported input.y -64. input.at:2946: testing Deprecated directives ... -./input.at:3019: cp errors-all experr -./input.at:3020: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -ffixit input.y -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror -57. input.at:2582: ok -60. input.at:2764: ok -./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error - -./input.at:2460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -W input.y - -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS 
--leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -65. input.at:3077: testing Unput's effect on locations ... -./input.at:3092: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=none input.y -./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -66. input.at:3113: testing Non-deprecated directives ... -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -65. input.at:3077: ok -./input.at:3022: sed -e '/^fix-it:/d' errors-all >experr -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:3023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:1124: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2462: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./input.at:2782: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -p bar -Wno-deprecated input.y +./input.at:2646: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret tstring.y stderr: -stderr: +./input.at:2247: sed 's,.*/$,,' stderr 1>&2 +./input.at:1859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none input.y:12.10-32: error: unset value: $$ [-Werror=other] 12 | a: INT | INT { } INT { } INT { }; | ^~~~~~~~~~~~~~~~~~~~~~~ @@ -4088,59 +4067,70 @@ input.y:26.40-42: error: unset value: $$ 
[-Werror=other] 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; | ^~~ -./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none -input.yy:2.1-32: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] -input.yy:4.1-32: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:5.1-32: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] -input.yy:3.1-32: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=full input.y -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Werror -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y -./input.at:2214: sed 's,.*/$,,' stderr 1>&2 -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full input.y +stderr: +./input.at:2727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none +input.y:3.8-10: error: %define variable 'api.value.union.name' redefined [-Werror=other] +input.y:1.8-10: note: previous definition +input.y:4.1-32: error: %define variable 'api.value.union.name' redefined [-Werror=other] +input.y:3.8-10: note: previous definition +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +64. input.at:2946: testing Deprecated directives ... ./input.at:786: sed 's,.*/$,,' stderr 1>&2 -67. input.at:3148: testing Cannot type action ... -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./input.at:2820: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error 53. 
input.at:2393: ok -./input.at:3027: rm -f output.c +./input.at:2460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:3019: cp errors-all experr +./input.at:2656: +set x `LC_ALL=C ls -l 'escape-in-tstring.y'` && + size=$6 && + { test $size -eq 0 || dd obs=1 seek=`expr $size - 1` if=/dev/null of='escape-in-tstring.y'; } || exit 77 +./input.at:2808: sed 's,.*/$,,' stderr 1>&2 +./input.at:3020: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -ffixit input.y ./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error -./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Werror stderr: -24. input.at:1074: ok -./input.at:3028: cp input.y input.y.orig -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y -input.y:2.15: error: stray '$' [-Werror=other] -./input.at:3029: sed -e '/fix-it/d' experr -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./input.at:2730: sed 's,.*/$,,' stderr 1>&2 -62. input.at:2840: ok -./input.at:3030: echo "bison: file 'input.y' was updated (backup: 'input.y~')" >>experr +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y +stderr: +input.y:11.19: error: stray '$' [-Werror=other] +input.y:11.23: error: stray '@' [-Werror=other] +input.y:12.19: error: stray '$' [-Werror=other] +input.y:12.23: error: stray '@' [-Werror=other] +input.y:13.19: error: stray '$' [-Werror=other] +input.y:13.23: error: stray '@' [-Werror=other] +input.y:16.19: error: stray '$' [-Werror=other] +input.y:16.23: error: stray '@' [-Werror=other] +input.y:17.19: error: stray '$' [-Werror=other] -./input.at:3031: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --update input.y +60. 
input.at:2764: ok +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +0+0 records in +0+0 records out +0 bytes copied, 4.7993e-05 s, 0.0 kB/s +stdout: +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=unsupported input.y +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Werror +./input.at:2659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret escape-in-tstring.y +./input.at:2861: sed 's,.*/$,,' stderr 1>&2 +65. input.at:3077: testing Unput's effect on locations ... -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=unsupported input.y -stderr: -input.y:2.8-10: error: duplicate directive [-Werror=other] - 2 | %start exp - | ^~~ -input.y:1.8-10: note: previous declaration - 1 | %start exp foo - | ^~~ -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=error -./input.at:3034: diff input.y.orig input.y~ -stderr: -./input.at:2825: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./input.at:3037: test ! -f output.c -input.y:14.1-15.5: error: duplicate directive: '%file-prefix' [-Werror=other] -input.y:13.1-18: note: previous declaration -input.y: error: %expect-rr applies only to GLR parsers [-Werror=other] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./input.at:3092: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2462: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -W input.y +./input.at:3022: sed -e '/^fix-it:/d' errors-all >experr +57. 
input.at:2582: ok +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=error +66. input.at:3113: testing Non-deprecated directives ... +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./input.at:3023: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +65. input.at:3077: ok +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none + +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror stderr: -./input.at:3040: sed -e '1,8d' input.y input.y:12.10-32: error: unset value: $$ [-Werror=other] 12 | a: INT | INT { } INT { } INT { }; | ^~~~~~~~~~~~~~~~~~~~~~~ @@ -4270,293 +4260,323 @@ input.y:26.40-42: error: unset value: $$ [-Werror=other] 26 | o: INT | INT { } INT { } INT { $$ = $1 + $2 + $3 + $4 + $5; }; | ^~~ -69. input.at:3205: testing %token-table and parse.error ... -./input.at:3220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -68. input.at:3171: testing Character literals and api.token.raw ... -./input.at:1877: sed 's,.*/$,,' stderr 1>&2 + ./input.at:2465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=none input.y +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y -Wnone,none -Werror --trace=none +67. input.at:3148: testing Cannot type action ... 
+./input.at:3027: rm -f output.c +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +./input.at:776: sed 's,.*/$,,' stderr 1>&2 +./input.at:3028: cp input.y input.y.orig +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./input.at:3029: sed -e '/fix-it/d' experr +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Werror +68. input.at:3171: testing Character literals and api.token.raw ... ./input.at:3181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y +./input.at:3030: echo "bison: file 'input.y' was updated (backup: 'input.y~')" >>experr +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error +./input.at:3031: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --update input.y +stderr: +./input.at:2808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +input.yy:2.1-32: error: %define variable 'api.location.type' requires '{...}' values [-Werror=deprecated] +input.yy:4.1-32: error: %define variable 'api.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:5.1-32: error: %define variable 'api.token.prefix' requires '{...}' values [-Werror=deprecated] +input.yy:3.1-32: error: %define variable 'api.namespace' requires '{...}' values [-Werror=deprecated] +68. input.at:3171: ok +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y +./input.at:2214: sed 's,.*/$,,' stderr 1>&2 +stderr: +./input.at:3034: diff input.y.orig input.y~ +input.y:14.1-15.5: error: duplicate directive: '%file-prefix' [-Werror=other] +input.y:13.1-18: note: previous declaration +input.y: error: %expect-rr applies only to GLR parsers [-Werror=other] +input.y: error: fix-its can be applied. Rerun with option '--update'. 
[-Werror=other] +stderr: +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./input.at:2247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +input.y:2.8-10: error: duplicate directive [-Werror=other] + 2 | %start exp + | ^~~ +input.y:1.8-10: note: previous declaration + 1 | %start exp foo + | ^~~ +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./input.at:3037: test ! -f output.c +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=full input.y +./input.at:3040: sed -e '1,8d' input.y +./input.at:3133: sed 's,.*/$,,' stderr 1>&2 +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=error +./input.at:2861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall input.y --warnings=none -Werror --trace=none +./input.at:1877: sed 's,.*/$,,' stderr 1>&2 +./input.at:2467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +stderr: ./input.at:3062: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y +input.y:2.15: error: stray '$' [-Werror=other] +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error stderr: +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.d -Dparse.lac=unsupported input.y +69. input.at:3205: testing %token-table and parse.error ... 
+./input.at:3220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:2730: sed 's,.*/$,,' stderr 1>&2 input.y:10.6-13: error: only midrule actions can be typed: int [-Werror=other] 10 | exp: {} | ^~~~~~~~ -./input.at:3133: sed 's,.*/$,,' stderr 1>&2 -./input.at:776: sed 's,.*/$,,' stderr 1>&2 -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=none input.y -68. input.at:3171: ok -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none ./input.at:3156: sed 's,.*/$,,' stderr 1>&2 +./input.at:2820: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=error +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none +64. input.at:2946: ok +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +62. 
input.at:2840: ok +./input.at:2469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y + +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy -Wnone,none -Werror --trace=none +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=none input.y +./input.at:3221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y 49. input.at:2224: ok +./input.at:2825: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y + +./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none 70. input.at:3231: testing Invalid file prefix mapping arguments ... ./input.at:3246: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo input.y ./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y -Wnone,none -Werror --trace=none -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=error -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +71. named-refs.at:22: testing Tutorial calculator ... 
./input.at:3247: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --file-prefix-map foo input.y -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -64. input.at:2946: ok - -61. input.at:2793: ok -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full input.y -71. named-refs.at:22: testing Tutorial calculator ... -./input.at:3221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none ./named-refs.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./input.at:2467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +54. input.at:2429: ok +./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./input.at:3248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo=bar -M baz input.y +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full input.y 72. named-refs.at:196: testing Undefined and ambiguous references ... ./named-refs.at:254: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y -./input.at:3248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo=bar -M baz input.y - -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none -72. 
named-refs.at:196: ok +69. input.at:3205: ok ./input.at:2730: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -W input.y --warnings=none -Werror --trace=none -./input.at:2934: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y - -73. named-refs.at:297: testing Misleading references ... -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=unsupported input.y ./input.at:3249: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -M foo= -M baz input.y +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y +61. input.at:2793: ok + +66. input.at:3113: ok +72. named-refs.at:196: ok ./input.at:2214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.yy --warnings=none -Werror --trace=none -./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +70. input.at:3231: ok +./input.at:2685: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=unsupported input.y 74. named-refs.at:316: testing Many kinds of errors ... 
./named-refs.at:384: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S yacc.c -d input.y -./input.at:2469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./named-refs.at:426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y -70. input.at:3231: ok -75. named-refs.at:551: testing Missing identifiers in brackets ... -./named-refs.at:559: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -69. input.at:3205: ok -./input.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./named-refs.at:184: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Werror -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-none input.y -75. named-refs.at:551: ok -74. named-refs.at:316: ok -./input.at:3133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +73. named-refs.at:297: testing Misleading references ... +40. input.at:1826: ok +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./input.at:2697: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y + +./named-refs.at:426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o test.c test.y +67. input.at:3148: ok +./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none +./named-refs.at:184: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS 76. named-refs.at:567: testing Redundant words in brackets ... 
-./input.at:3156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./named-refs.at:575: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y -Wnone,none -Werror --trace=none -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.c -d input.y -54. input.at:2429: ok -stderr: +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-none input.y +75. named-refs.at:551: testing Missing identifiers in brackets ... +./named-refs.at:559: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./named-refs.at:575: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y 78. named-refs.at:599: testing Stray symbols in brackets ... -./named-refs.at:607: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' test.y || exit 77 -76. named-refs.at:567: ok +74. named-refs.at:316: ok 77. named-refs.at:583: testing Comments in brackets ... + +./named-refs.at:607: "$PERL" -pi -e 's/\\(\d{3})/chr(oct($1))/ge' test.y || exit 77 ./named-refs.at:591: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./named-refs.at:608: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -test.y:11.22-29: error: misleading reference: '$foo.bar' [-Werror=other] -test.y:11.8-10: note: refers to: $foo at $1 -test.y:11.12-18: note: possibly meant: $[foo.bar] at $2 -79. named-refs.at:618: testing Redundant words in LHS brackets ... 48. input.at:2191: ok +79. named-refs.at:618: testing Redundant words in LHS brackets ... +./named-refs.at:608: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y +76. 
named-refs.at:567: ok ./named-refs.at:625: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -./named-refs.at:306: sed 's,.*/$,,' stderr 1>&2 +75. named-refs.at:551: ok 77. named-refs.at:583: ok +80. named-refs.at:635: testing Factored LHS ... -66. input.at:3113: ok -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Werror -78. named-refs.at:599: - ok -40. input.at:1826: ok +78. named-refs.at:599: ok +./named-refs.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y 79. named-refs.at:618: ok -./input.at:786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=error -67. input.at:3148: ok -81. named-refs.at:648: testing Unresolved references ... -./named-refs.at:676: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y - -./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y +./input.at:2697: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Werror -80. named-refs.at:635: testing Factored LHS ... -./named-refs.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.cc -d input.y +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y -81. named-refs.at:648: ok -87. 
output.at:87: testing Output files: api.header.include={"./foo.h"} -dv -y ... -./output.at:87: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y -83. output.at:68: testing Output files: -dv ... +81. named-refs.at:648: testing Unresolved references ... +./named-refs.at:676: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -84. output.at:74: testing Output files: -dv >&- ... -./output.at:74: case "$PREBISON" in *valgrind*) exit 77;; esac 82. named-refs.at:715: testing $ or @ followed by . or - ... -./named-refs.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -86. output.at:84: testing Output files: -dv -y ... +83. output.at:68: testing Output files: -dv ... 85. output.at:81: testing Output files: -dv -o foo.c ... ./output.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv foo.y - +./named-refs.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y 80. named-refs.at:635: ok -./output.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv >&- foo.y -stderr: -./output.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y +84. output.at:74: testing Output files: -dv >&- ... +./output.at:74: case "$PREBISON" in *valgrind*) exit 77;; esac +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Werror +86. output.at:84: testing Output files: -dv -y ... ./output.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.c foo.y -88. output.at:92: testing Output files: -dv -o foo.tab.c ... +87. output.at:87: testing Output files: api.header.include={"./foo.h"} -dv -y ... +./output.at:87: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y +./output.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -y foo.y +./output.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv >&- foo.y +81. named-refs.at:648: ok ./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret test.y +stderr: +test.y:11.22-29: error: misleading reference: '$foo.bar' [-Werror=other] +test.y:11.8-10: note: refers to: $foo at $1 +test.y:11.12-18: note: possibly meant: $[foo.bar] at $2 -input.y:2.15: error: stray '$' [-Werror=other] -89. output.at:95: testing Output files: --fixed-output-files -dv -g --html ... -./output.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.tab.c foo.y -89. 
output.at:95: ./input.at:2733: sed 's,.*/$,,' stderr 1>&2 stderr: - skipped (output.at:95) -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Wnone,none -Werror --trace=none -foo.y:1.1-7: warning: POSIX Yacc does not support %define [-Wyacc] -./output.at:87: find . -type f | +stderr: +./output.at:68: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -90. output.at:97: testing Output files: -Hfoo.header -v -gfoo.gv --html=foo.html ... -./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr.cc -d input.y -90. output.at:97: stderr: - skipped (output.at:97) +input.y:2.15: error: stray '$' [-Werror=other] +./named-refs.at:306: sed 's,.*/$,,' stderr 1>&2 +./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.cc -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y +16. input.at:784: ok +./input.at:2733: sed 's,.*/$,,' stderr 1>&2 +83. output.at:68: ok +88. output.at:92: testing Output files: -dv -o foo.tab.c ... +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=error +stderr: +./output.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -o foo.tab.c foo.y ./output.at:84: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +89. output.at:95: testing Output files: --fixed-output-files -dv -g --html ... +stderr: +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y ./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=error stderr: -./output.at:87: grep '#include "./foo.h"' y.tab.c -./output.at:68: find . -type f | +89. output.at:95: ./output.at:81: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -stdout: -#include "./foo.h" -87. output.at:87: ok -./output.at:84: grep '#include "y.tab.h"' y.tab.c -stderr: -91. output.at:100: testing Output files: -dv -g --xml --fixed-output-files ... 
- -stderr: -stdout: -./output.at:100: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml --fixed-output-files foo.y -stderr: ./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Werror -./output.at:81: find . -type f | + skipped (output.at:95) +stderr: +./output.at:74: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:92: find . -type f | + +foo.y:1.1-7: warning: POSIX Yacc does not support %define [-Wyacc] +./output.at:87: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -86. output.at:84: ./output.at:74: find . -type f | +./output.at:84: grep '#include "y.tab.h"' y.tab.c +./output.at:81: grep '#include "foo.h"' foo.c +stdout: + +stdout: + +86. output.at:84: ok +#include "foo.h" +90. output.at:97: testing Output files: -Hfoo.header -v -gfoo.gv --html=foo.html ... +85. output.at:81: ok +90. output.at:97: 84. output.at:74: ok + skipped (output.at:97) +stderr: +./output.at:87: grep '#include "./foo.h"' y.tab.c +stderr: +test.y:4.9: error: stray '$' [-Werror=other] +test.y:5.9: error: stray '@' [-Werror=other] +91. output.at:100: testing Output files: -dv -g --xml --fixed-output-files ... +stdout: +./output.at:92: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - ok -./input.at:776: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --warnings=midrule-values -fcaret input.y --warnings=none -Werror --trace=none -83. output.at:68: ok +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y -Wnone,none -Werror --trace=none +#include "./foo.h" +87. output.at:87: ok 92. output.at:102: testing Output files: -dv -g --xml -y ... +./output.at:100: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml --fixed-output-files foo.y + +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Wnone,none -Werror --trace=none ./output.at:102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y -./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=none -Werror --trace=none -88. output.at:92: ok -16. 
input.at:784: ok -./output.at:81: grep '#include "foo.h"' foo.c -stdout: -#include "foo.h" -stderr: -stdout: +./input.at:2704: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Slalr1.java -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -Dparse.lac.memory-trace=full input.y +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y -./named-refs.at:185: $PREPARSER ./test input.txt -84. output.at:74: ok -stderr: -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y -Wnone,none -Werror --trace=none +./named-refs.at:740: sed 's,.*/$,,' stderr 1>&2 -./named-refs.at:185: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -85. output.at:81: ok +88. output.at:92: ok +./named-refs.at:306: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o test.c test.y --warnings=none -Werror --trace=none +95. output.at:110: testing Output files: -dv -b bar ... +94. output.at:107: testing Output files: -dv -g --xml -o y.tab.c ... +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=error +15. input.at:774: ok +./output.at:110: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -b bar foo.y 93. output.at:104: testing Output files: %require "3.4" -dv -g --xml -y ... ./output.at:104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -y foo.y +96. output.at:112: testing Output files: -dv -g -o foo.c ... +./output.at:107: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -o y.tab.c foo.y + stderr: +./output.at:112: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g -o foo.c foo.y :6: warning: deprecated option: '--fixed-output-files', use '-o y.tab.c' [-Wdeprecated] ./output.at:100: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -94. output.at:107: testing Output files: -dv -g --xml -o y.tab.c ... -stderr: - - -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S glr2.cc -d input.y -./output.at:107: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g --xml -o y.tab.c foo.y -71. named-refs.at:22: 96. output.at:112: testing Output files: -dv -g -o foo.c ... - ok -58. input.at:2675: ok -73. named-refs.at:297: ok -95. output.at:110: testing Output files: -dv -b bar ... -./output.at:110: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -b bar foo.y +97. 
output.at:116: testing Output files: %header %verbose ... +98. output.at:118: testing Output files: %header %verbose %yacc ... +./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y stderr: -91. output.at:100: ok -test.y:4.9: error: stray '$' [-Werror=other] -test.y:5.9: error: stray '@' [-Werror=other] +./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=none -Werror --trace=none -stderr: -98. output.at:118: testing Output files: %header %verbose %yacc ... +./output.at:116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y ./output.at:118: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y ./output.at:102: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:112: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -dv -g -o foo.c foo.y -./named-refs.at:740: sed 's,.*/$,,' stderr 1>&2 -97. output.at:116: testing Output files: %header %verbose ... -./output.at:116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y - -92. output.at:102: ok -./input.at:2733: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-none input.y --warnings=none -Werror --trace=none -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=error - -99. output.at:121: testing Output files: %header %verbose %yacc ... - -101. output.at:127: testing Output files: %output "bar.c" %header %verbose %yacc ... -./output.at:121: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy - -stderr: -./output.at:127: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.d input.y +91. output.at:100: ok +58. input.at:2675: ok stderr: ./output.at:110: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Wnone,none -Werror --trace=none +73. named-refs.at:297: ok +92. output.at:102: ok stderr: -stderr: -./output.at:107: find . 
-type f | - "$PERL" -ne ' - s,\./,,; chomp; - push @file, $_ unless m{^(foo.y|testsuite.log)$}; - END { print join (" ", sort @file), "\n" }' || exit 77 foo.y:1.1-8: warning: POSIX Yacc does not support %require [-Wyacc] foo.y:1.10-14: warning: POSIX Yacc does not support string literals [-Wyacc] ./output.at:104: find . -type f | @@ -4564,141 +4584,165 @@ s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -103. output.at:136: testing Output files: %header %verbose ... -102. output.at:129: testing Output files: %file-prefix "baz" %output "bar.c" %header %verbose %yacc ... -./output.at:112: find . -type f | - "$PERL" -ne ' - s,\./,,; chomp; - push @file, $_ unless m{^(foo.y|testsuite.log)$}; - END { print join (" ", sort @file), "\n" }' || exit 77 - -100. output.at:125: testing Output files: %file-prefix "bar" %header %verbose ... -./output.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -./output.at:129: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y -94. output.at:107: ok stderr: stderr: -./output.at:118: find . -type f | +stderr: +stdout: +./output.at:107: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:136: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -93. output.at:104: ok -./output.at:116: find . -type f | +./named-refs.at:185: $PREPARSER ./test input.txt +99. output.at:121: testing Output files: %header %verbose %yacc ... +./output.at:121: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:112: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 95. output.at:110: ok -./input.at:2738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,none,other input.y -104. output.at:139: testing Output files: %header %verbose -o foo.c ... +93. output.at:104: ok +stderr: + + +./named-refs.at:185: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + 96. output.at:112: ok stderr: -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y -Wnone,none -Werror --trace=none +stderr: -./input.at:2935: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -S lalr1.java input.y -./output.at:127: find . -type f | +./output.at:116: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +94. output.at:107: ok +63. input.at:2883: ok +100. output.at:125: testing Output files: %file-prefix "bar" %header %verbose ... +101. output.at:127: testing Output files: %output "bar.c" %header %verbose %yacc ... + +71. 
named-refs.at:22: ok +./output.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +./input.at:2738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,none,other input.y + +102. output.at:129: testing Output files: %file-prefix "baz" %output "bar.c" %header %verbose %yacc ... stderr: -./output.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c foo.yy -./output.at:125: find . -type f | +97. output.at:116: ok +./output.at:127: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +./output.at:118: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -98. output.at:118: ok -97. output.at:116: ok +103. output.at:136: testing Output files: %header %verbose ... +./output.at:129: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.y +./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=none -Werror --trace=none +./output.at:136: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy + + +104. output.at:139: testing Output files: %header %verbose -o foo.c ... stderr: -./output.at:129: find . -type f | + +98. output.at:118: ok +./output.at:121: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(foo.y|testsuite.log)$}; + push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - -107. output.at:150: testing Output files: %header "foo.hpp" -o foo.c++ ... +./output.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c foo.yy 105. output.at:142: testing Output files: --header=foo.hpp -o foo.c++ ... + ./output.at:142: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy +106. output.at:146: testing Output files: --header=foo.hpp -o foo.c++ ... +108. output.at:154: testing Output files: -o foo.c++ --graph=foo.gph ... +107. output.at:150: testing Output files: %header "foo.hpp" -o foo.c++ ... +./input.at:2741: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,no-all,other input.y stderr: -./output.at:121: find . -type f | +./output.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy +./output.at:125: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - -15. input.at:774: ok -101. 
output.at:127: ok -./output.at:150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy -100. output.at:125: 102. output.at:129: ok - ok +./output.at:154: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ --graph=foo.gph foo.yy stderr: +99. output.at:121: ok -106. output.at:146: testing Output files: --header=foo.hpp -o foo.c++ ... ./output.at:136: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header=foo.hpp -o foo.c++ foo.yy - -99. output.at:121: ok -63. input.at:2883: ok -./input.at:2741: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror,no-all,other input.y +./output.at:150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy 109. output.at:160: testing Output files: %type useless --header --graph --xml --report=all -Wall -Werror ... +110. output.at:167: testing Output files: useless=--header --graph --xml --report=all -Wall -Werror ... +./output.at:160: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y +100. output.at:125: ok +./output.at:167: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y +111. output.at:173: testing Output files: %defines -o foo.c++ ... +stderr: +103. output.at:136: ok +./output.at:173: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy -108. output.at:154: testing Output files: -o foo.c++ --graph=foo.gph ... -./output.at:154: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ --graph=foo.gph foo.yy stderr: -./output.at:139: find . -type f | + +./output.at:129: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -110. output.at:167: testing Output files: useless=--header --graph --xml --report=all -Wall -Werror ... -./named-refs.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret test.y --warnings=none -Werror --trace=none -./output.at:160: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y - -103. 
output.at:136: - ok -./output.at:167: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header --graph --xml --report=all -Wall -Werror foo.y -stderr: -./output.at:150: find . -type f | +./output.at:146: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - - -104. output.at:139: ok - -111. output.at:173: testing Output files: %defines -o foo.c++ ... +82. named-refs.at:715: ok stderr: -113. output.at:191: testing Output files: lalr1.cc ... -./output.at:173: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy stderr: -./output.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +stderr: +./output.at:127: find . -type f | + "$PERL" -ne ' + s,\./,,; chomp; + push @file, $_ unless m{^(foo.y|testsuite.log)$}; + END { print join (" ", sort @file), "\n" }' || exit 77 +106. output.at:146: ok +./output.at:139: find . -type f | + "$PERL" -ne ' + s,\./,,; chomp; + push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + END { print join (" ", sort @file), "\n" }' || exit 77 112. output.at:176: testing Output files: %defines "foo.hpp" -o foo.c++ ... +./output.at:176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy +stderr: + ./output.at:142: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o foo.c++ foo.yy -./output.at:146: find . -type f | +113. output.at:191: testing Output files: lalr1.cc ... +102. output.at:129: ok +./output.at:150: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -107. output.at:150: ok - ./input.at:2746: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror -Wno-error=other input.y stderr: -116. output.at:200: testing Output files: lalr1.cc %verbose %locations ... -./output.at:200: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:154: find . -type f | + "$PERL" -ne ' + s,\./,,; chomp; + push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + END { print join (" ", sort @file), "\n" }' || exit 77 +./output.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +104. output.at:139: ok +114. output.at:194: testing Output files: lalr1.cc %verbose ... +stderr: +105. output.at:142: 101. output.at:127: ok + + ok foo.y:1.13-19: error: symbol 'useless' is used, but is not defined as a token and has no rules [-Werror=other] foo.y: error: 1 nonterminal useless in grammar [-Werror=other] foo.y:1.13-19: error: nonterminal useless in grammar: useless [-Werror=other] @@ -4707,23 +4751,20 @@ s,\./,,; chomp; push @file, $_ unless m{^(foo.y|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -114. output.at:194: testing Output files: lalr1.cc %verbose ... +107. 
output.at:150: ok ./output.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -115. output.at:197: testing Output files: lalr1.cc %header %verbose ... -./output.at:197: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy + +108. output.at:154: ok stderr: -105. output.at:142: ok -82. named-refs.at:715: ok -117. output.at:203: testing Output files: lalr1.cc %header %verbose %locations ... -foo.y:1.1-15: error: %define variable 'useless' is not used stderr: -106. output.at:146: -./output.at:191: find . -type f | + + +foo.y:1.1-15: error: %define variable 'useless' is not used +./output.at:173: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - ok ./output.at:167: find . -type f | "$PERL" -ne ' s,\./,,; chomp; @@ -4732,323 +4773,319 @@ 109. output.at:160: ok stderr: -./output.at:203: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -./output.at:154: find . -type f | - "$PERL" -ne ' - s,\./,,; chomp; - push @file, $_ unless m{^(foo.yy|testsuite.log)$}; - END { print join (" ", sort @file), "\n" }' || exit 77 -stderr: -110. output.at:167: ok -./output.at:173: find . -type f | +115. output.at:197: testing Output files: lalr1.cc %header %verbose ... +116. output.at:200: testing Output files: lalr1.cc %verbose %locations ... + +./output.at:200: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:191: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -119. output.at:210: testing Output files: lalr1.cc %header %verbose %locations -o subdir/foo.cc ... -118. output.at:206: testing Output files: lalr1.cc %header %verbose ... -./input.at:2750: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-error=other -Werror input.y stderr: +./output.at:197: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy + +117. output.at:203: testing Output files: lalr1.cc %header %verbose %locations ... ./output.at:176: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -stderr: - +111. output.at:173: ok +./input.at:2750: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-error=other -Werror input.y +110. output.at:167: ok +./output.at:203: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +118. output.at:206: testing Output files: lalr1.cc %header %verbose ... +120. output.at:215: testing Output files: lalr1.cc %header %verbose %file-prefix "output_dir/foo" ... +119. output.at:210: testing Output files: lalr1.cc %header %verbose %locations -o subdir/foo.cc ... -./output.at:200: find . -type f | - "$PERL" -ne ' - s,\./,,; chomp; - push @file, $_ unless m{^(foo.yy|testsuite.log)$}; - END { print join (" ", sort @file), "\n" }' || exit 77 113. output.at:191: ok - -111. output.at:173: ok -stderr: -108. output.at:154: ok -120. 
output.at:215: testing Output files: lalr1.cc %header %verbose %file-prefix "output_dir/foo" ... +112. output.at:176: ok stderr: -./output.at:197: find . -type f | +121. output.at:220: testing Output files: lalr1.cc %header %locations %verbose %file-prefix "output_dir/foo" ... +./output.at:194: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:194: find . -type f | + +stderr: +./output.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret subdir/foo.yy +122. output.at:226: testing Output files: lalr1.cc %header %locations api.location.file=none %require "3.2" ... +./output.at:226: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:200: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -112. output.at:176: ok -./output.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret subdir/foo.yy -123. output.at:231: testing Output files: lalr1.cc %header %locations api.location.file="foo.loc.hh" %require "3.2" ... -./output.at:231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -122. output.at:226: testing Output files: lalr1.cc %header %locations api.location.file=none %require "3.2" ... -121. output.at:220: testing Output files: lalr1.cc %header %locations %verbose %file-prefix "output_dir/foo" ... - -115. output.at:197: ok -116. output.at:200: ok -./output.at:226: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy -114. output.at:194: ok - - 124. output.at:237: testing Output files: lalr1.cc %header %locations api.location.file="$at_dir/foo.loc.hh" %require "3.2" ... + +123. output.at:231: testing Output files: lalr1.cc %header %locations api.location.file="foo.loc.hh" %require "3.2" ... ./output.at:237: "$PERL" -pi -e 's{\$at_dir}'"{$at_group_dir}g" foo.yy || exit 77 +114. output.at:194: ok +./input.at:2754: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror=other -Wno-other input.y +./output.at:237: rm -f foo.yy.bak +./output.at:231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +./output.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy +stderr: 125. output.at:267: testing Conflicting output files: --graph="foo.tab.c" ... ./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -stderr: -./input.at:2754: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror=other -Wno-other input.y -./output.at:203: find . -type f | +./output.at:197: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -127. output.at:277: testing Conflicting output files: lalr1.cc %header %locations --graph="location.hh" ... 
-./output.at:237: rm -f foo.yy.bak -./output.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret foo.yy - +116. output.at:200: ok 126. output.at:272: testing Conflicting output files: %header "foo.output" -v ... - -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="location.hh" foo.y +127. output.at:277: testing Conflicting output files: lalr1.cc %header %locations --graph="location.hh" ... ./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v foo.y - stderr: +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --graph="location.hh" foo.y + ./output.at:206: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -117. output.at:203: ok -128. output.at:282: testing Conflicting output files: -o foo.y ... -./output.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o foo.y foo.y -./output.at:210: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o subdir/foo.cc subdir/foo.yy +115. output.at:197: ok stderr: -./output.at:206: grep 'include .subdir/' foo.tab.cc +stderr: +./output.at:203: find . -type f | + "$PERL" -ne ' + s,\./,,; chomp; + push @file, $_ unless m{^(foo.yy|testsuite.log)$}; + END { print join (" ", sort @file), "\n" }' || exit 77 + ./output.at:231: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -129. output.at:328: testing Output file name: `~!@#$%^&*()-=_+{}[]|\:;<>, .' ... -./output.at:328: touch "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.tmp" || exit 77 +./output.at:206: grep 'include .subdir/' foo.tab.cc stderr: -130. output.at:335: testing Output file name: ( ... -./output.at:335: touch "(.tmp" || exit 77 -131. output.at:336: testing Output file name: ) ... -./output.at:336: touch ").tmp" || exit 77 -./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" glr.y -./output.at:215: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy ./output.at:206: grep 'include .subdir/' foo.tab.hh -./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").c" --header=").h" glr.y +./output.at:215: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy ./output.at:226: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 - -118. output.at:206: ok +128. output.at:282: testing Conflicting output files: -o foo.y ... 59. input.at:2719: ok -123. 
output.at:231: ok -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Werror +118. output.at:206: ok stderr: + ./output.at:237: find . -type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 -./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.c" --header="(.h" glr.y +129. output.at:328: testing Output file name: `~!@#$%^&*()-=_+{}[]|\:;<>, .' ... +./output.at:328: touch "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.tmp" || exit 77 +123. output.at:231: ok +117. output.at:203: ok +./output.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o foo.y foo.y +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Werror +./output.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy +122. output.at:226: ok +./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" glr.y +124. output.at:237: ok +130. output.at:335: testing Output file name: ( ... +./output.at:335: touch "(.tmp" || exit 77 + +./output.at:210: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o subdir/foo.cc subdir/foo.yy + ./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Werror -./output.at:282: cat foo.y -122. output.at:226: ok +stderr: +./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.c" --header="(.h" glr.y +foo.y: error: conflicting outputs to file 'foo.tab.c' [-Werror=other] ./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Werror -./output.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret gram_dir/foo.yy -132. output.at:337: testing Output file name: # ... -./output.at:337: touch "#.tmp" || exit 77 + +131. output.at:336: testing Output file name: ) ... +./output.at:336: touch ").tmp" || exit 77 stderr: -128. output.at:282: ok -./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.c" --header="#.h" glr.y -124. output.at:237: ok -133. output.at:338: testing Output file name: @@ ... -./output.at:338: touch "@@.tmp" || exit 77 ./output.at:215: find . 
-type f | "$PERL" -ne ' s,\./,,; chomp; push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 stderr: +132. output.at:337: testing Output file name: # ... +./output.at:337: touch "#.tmp" || exit 77 -./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.c" --header="@@.h" glr.y -./output.at:210: find . -type f | +./output.at:267: sed 's,.*/$,,' stderr 1>&2 +./output.at:220: find . -type f | "$PERL" -ne ' s,\./,,; chomp; - push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; + push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; END { print join (" ", sort @file), "\n" }' || exit 77 +133. output.at:338: testing Output file name: @@ ... +./output.at:338: touch "@@.tmp" || exit 77 +./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").c" --header=").h" glr.y ./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.h" -./output.at:336: ls ").c" ").h" -stdout: -stdout: - -./output.at:335: ls "(.c" "(.h" -).c -).h +./output.at:282: cat foo.y +./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.c" --header="#.h" glr.y 134. output.at:339: testing Output file name: @{ ... ./output.at:339: touch "@{.tmp" || exit 77 -./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.c" --header="@{.h" glr.y -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.c -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.h -120. output.at:215: ok -stderr: -stdout: 135. output.at:340: testing Output file name: @} ... ./output.at:340: touch "@}.tmp" || exit 77 -(.c -(.h -foo.y: error: conflicting outputs to file 'foo.tab.c' [-Werror=other] - - -./output.at:336: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c ").c" +stdout: +120. output.at:215: ok +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.c +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.h +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=error +121. output.at:220: ok +./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.c" --header="@@.h" glr.y +./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.c" --header="@{.h" glr.y ./output.at:328: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.c" +128. output.at:282: ok ./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.c" --header="@}.h" glr.y -./output.at:335: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "(.c" -./output.at:210: grep 'include .subdir/' subdir/foo.cc -136. output.at:341: testing Output file name: [ ... -./output.at:341: touch "[.tmp" || exit 77 -./output.at:267: sed 's,.*/$,,' stderr 1>&2 -./output.at:337: ls "#.c" "#.h" stderr: foo.y: error: conflicting outputs to file 'location.hh' [-Werror=other] +./output.at:335: ls "(.c" "(.h" +136. output.at:341: testing Output file name: [ ... +./output.at:341: touch "[.tmp" || exit 77 stderr: + +./output.at:277: sed 's,.*/$,,' stderr 1>&2 +./output.at:210: find . 
-type f | + "$PERL" -ne ' + s,\./,,; chomp; + push @file, $_ unless m{^(subdir/foo.yy|testsuite.log)$}; + END { print join (" ", sort @file), "\n" }' || exit 77 stdout: stderr: foo.y: error: conflicting outputs to file 'foo.output' [-Werror=other] - ./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.c" --header="[.h" glr.y +(.c +(.h + + +./output.at:337: ls "#.c" "#.h" +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=error +./output.at:336: ls ").c" ").h" +./output.at:335: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "(.c" +./output.at:272: sed 's,.*/$,,' stderr 1>&2 +stdout: +137. output.at:342: testing Output file name: ] ... +./output.at:342: touch "].tmp" || exit 77 +stdout: #.c #.h -./output.at:277: sed 's,.*/$,,' stderr 1>&2 -./output.at:220: find . -type f | - "$PERL" -ne ' - s,\./,,; chomp; - push @file, $_ unless m{^(gram_dir/foo.yy|testsuite.log)$}; - END { print join (" ", sort @file), "\n" }' || exit 77 +./output.at:210: grep 'include .subdir/' subdir/foo.cc +./output.at:340: ls "@}.c" "@}.h" ./output.at:337: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "#.c" -138. output.at:363: testing Graph with no conflicts ... +).c +).h ./output.at:210: grep 'include .subdir/' subdir/foo.hh -137. output.at:342: testing Output file name: ] ... -./output.at:342: touch "].tmp" || exit 77 -./output.at:363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y -./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].c" --header="].h" glr.y -119. output.at:210: ok -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=error -./output.at:338: ls "@@.c" "@@.h" -./output.at:272: sed 's,.*/$,,' stderr 1>&2 +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Wnone,none -Werror --trace=none stdout: -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=error -@@.c -@@.h -./output.at:338: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@@.c" -./output.at:339: ls "@{.c" "@{.h" -121. output.at:220: ok +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=error +./output.at:336: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c ").c" 139. output.at:403: testing Graph with unsolved S/R ... 
+@}.c +@}.h ./output.at:403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y -./output.at:340: ls "@}.c" "@}.h" +./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].c" --header="].h" glr.y +119. output.at:210: 138. output.at:363: testing Graph with no conflicts ... +./output.at:363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y + ok +./output.at:340: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@}.c" ./output.at:341: ls "[.c" "[.h" +./output.at:338: ls "@@.c" "@@.h" +./output.at:339: ls "@{.c" "@{.h" stdout: -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=error stdout: -@}.c -@}.h stdout: -@{.c -@{.h -./output.at:339: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@{.c" -stderr: -./output.at:340: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@}.c" - -./output.at:363: grep -v // input.gv [.c [.h -./output.at:342: ls "].c" "].h" -stdout: -].c -].h +@{.c +@{.h +@@.c +@@.h ./output.at:341: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "[.c" -138. output.at:363: ok -./output.at:342: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "].c" +./output.at:338: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@@.c" +./output.at:339: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "@{.c" -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y -Wnone,none -Werror --trace=none -140. output.at:473: testing Graph with solved S/R ... 
-./output.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y stderr: -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Wnone,none -Werror --trace=none ./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y -Wnone,none -Werror --trace=none +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y -Wnone,none -Werror --trace=none +./output.at:363: grep -v // input.gv +./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=none -Werror --trace=none +stderr: input.y: warning: 3 shift/reduce conflicts [-Wconflicts-sr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:10.10-18: warning: rule useless in parser due to conflicts [-Wother] input.y:11.10-18: warning: rule useless in parser due to conflicts [-Wother] input.y:12.10-18: warning: rule useless in parser due to conflicts [-Wother] ./output.at:403: grep -v // input.gv -141. output.at:538: testing Graph with R/R ... +138. output.at:363: ok +./output.at:342: ls "].c" "].h" +140. output.at:473: testing Graph with solved S/R ... +stdout: +./output.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +].c +].h +139. output.at:403: ok +./output.at:342: $CC $CFLAGS $CPPFLAGS -c -o glr.o -c "].c" -./output.at:538: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=none -Werror --trace=none + +./output.at:267: cat foo.y +141. output.at:538: testing Graph with R/R ... stderr: +./output.at:538: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y +./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=none -Werror --trace=none input.y:6.5-7: warning: rule useless in parser due to conflicts [-Wother] input.y:14.10-18: warning: rule useless in parser due to conflicts [-Wother] input.y:15.10-18: warning: rule useless in parser due to conflicts [-Wother] ./output.at:473: grep -v // input.gv -139. output.at:403: ok -140. output.at:473: ok +125. output.at:267: ok 142. output.at:576: testing Graph with reductions with multiple LAT ... 
./output.at:576: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y -./output.at:272: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v foo.y --warnings=none -Werror --trace=none +140. output.at:473: ok +./output.at:277: cat foo.y stderr: -./output.at:267: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="foo.tab.c" foo.y --warnings=none -Werror --trace=none -./output.at:277: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --graph="location.hh" foo.y --warnings=none -Werror --trace=none +127. output.at:277: ok input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:4.3: warning: rule useless in parser due to conflicts [-Wother] ./output.at:538: grep -v // input.gv 143. output.at:641: testing Graph with a reduction rule both enabled and disabled ... -141. output.at:538: ok ./output.at:641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall --graph input.y -144. output.at:744: testing C++ Output File Prefix Mapping ... -./output.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x1.cc -M out/=bar/ x1.yy ./output.at:272: cat foo.y -126. output.at:272: ok stderr: +141. output.at:538: ok +144. output.at:744: testing C++ Output File Prefix Mapping ... +./output.at:775: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x1.cc -M out/=bar/ x1.yy input.y: warning: 3 reduce/reduce conflicts [-Wconflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:2.14-18: warning: rule useless in parser due to conflicts [-Wother] input.y:5.3: warning: rule useless in parser due to conflicts [-Wother] ./output.at:576: grep -v // input.gv -./output.at:277: cat foo.y +126. output.at:272: ok 142. output.at:576: ok -./output.at:267: cat foo.y -./output.at:775: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x1.o out/x1.cc -127. output.at:277: ok - -125. output.at:267: ok 145. diagnostics.at:84: testing Warnings ... -146. diagnostics.at:133: testing Single point locations ... ./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -149. diagnostics.at:235: testing Tabulations and multibyte characters ... +./output.at:775: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x1.o out/x1.cc +146. diagnostics.at:133: testing Single point locations ... +148. diagnostics.at:217: testing Zero-width characters ... 147. diagnostics.at:182: testing Line is too short, and then you die ... 
./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -148. diagnostics.at:217: testing Zero-width characters ... -./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:217: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 ./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:182: "$PERL" -pi -e ' +./diagnostics.at:84: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5056,7 +5093,8 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:84: "$PERL" -pi -e ' +./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:182: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5064,11 +5102,8 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:217: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 ./diagnostics.at:84: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:133: "$PERL" -pi -e ' +./diagnostics.at:217: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5076,9 +5111,8 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -stderr: -./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:217: "$PERL" -pi -e ' +./diagnostics.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:133: "$PERL" -pi -e ' s{()}{ $1 eq "" ? 
$1 : "" }ge; if (/Example/) { @@ -5086,12 +5120,33 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 +./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +147. diagnostics.at:182: ok +148. diagnostics.at:217: ok + +145. diagnostics.at:84: ok + +149. diagnostics.at:235: testing Tabulations and multibyte characters ... +146. diagnostics.at:133: ok +150. diagnostics.at:282: testing Tabulations and multibyte characters ... + +stderr: input.y: warning: 4 shift/reduce conflicts [-Wconflicts-sr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples ./output.at:641: grep -v // input.gv -147. diagnostics.at:182: ok -./diagnostics.at:217: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +151. diagnostics.at:303: testing Special files ... + +143. output.at:641: ok +./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y stderr: +stdout: +152. diagnostics.at:328: testing Complaints from M4 ... +./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.cc" --header="@}.hh" cxx.y +./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y + ./diagnostics.at:235: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5100,47 +5155,7 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -143. output.at:641: ok -stdout: -148. diagnostics.at:217: - ok ./diagnostics.at:235: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -150. diagnostics.at:282: testing Tabulations and multibyte characters ... -./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").cc" --header=").hh" cxx.y -145. diagnostics.at:84: ok -stderr: - -146. 
diagnostics.at:133: ok -stdout: -stderr: -stdout: -stderr: -./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" cxx.y -stdout: - -./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.cc" --header="@@.hh" cxx.y -./output.at:336: ls ").cc" ").hh" -151. diagnostics.at:303: testing Special files ... -stderr: -./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.cc" --header="(.hh" cxx.y -stdout: - -stdout: - -./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.cc" --header="#.hh" cxx.y -).cc -).hh -152. diagnostics.at:328: testing Complaints from M4 ... -./output.at:336: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c ").cc" -./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" -154. diagnostics.at:372: testing CR NL ... -stdout: -153. diagnostics.at:351: testing Carriage return ... -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.cc -`~!@#$%^&*()-=_+{}[]|\:;<>, .'.hh -./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -./output.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" ./diagnostics.at:282: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5149,31 +5164,9 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./output.at:338: ls "@@.cc" "@@.hh" -149. diagnostics.at:235: ok -stdout: +153. diagnostics.at:351: testing Carriage return ... ./diagnostics.at:282: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y stderr: -@@.cc -@@.hh -stdout: -./output.at:335: ls "(.cc" "(.hh" -./diagnostics.at:372: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? 
chr($v) : $v}ge' input.y experr || exit 77 -./output.at:338: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@@.cc" -./output.at:340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@}.cc" --header="@}.hh" cxx.y -./output.at:337: ls "#.cc" "#.hh" -stdout: -stdout: -#.cc -#.hh -./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y -(.cc -(.hh -./output.at:335: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "(.cc" -./output.at:337: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "#.cc" - -./diagnostics.at:351: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 ./diagnostics.at:303: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5182,10 +5175,18 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -150. diagnostics.at:282: ok -155. diagnostics.at:399: testing Screen width: 200 columns ... +stdout: +./diagnostics.at:351: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 +./output.at:336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o ").cc" --header=").hh" cxx.y +./output.at:340: ls "@}.cc" "@}.hh" ./diagnostics.at:303: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y ./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +stdout: +@}.cc +@}.hh +stderr: +./output.at:340: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@}.cc" +150. diagnostics.at:282: ok ./diagnostics.at:351: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5194,10 +5195,9 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./output.at:340: ls "@}.cc" "@}.hh" - -./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret --color=debug -Wall,cex input.y -./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +stdout: +149. diagnostics.at:235: ok +./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.cc" --header="@{.hh" cxx.y ./diagnostics.at:328: "$PERL" -pi -e ' s{()}{ $1 eq "" ? 
$1 : "" }ge; if (/Example/) @@ -5206,7 +5206,64 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./diagnostics.at:372: "$PERL" -pi -e ' +./diagnostics.at:351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +stderr: +stdout: +./output.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" --header="\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" cxx.y +./output.at:336: ls ").cc" ").hh" + + +stdout: +153. diagnostics.at:351: ok +).cc +).hh +./output.at:336: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c ").cc" +./output.at:339: ls "@{.cc" "@{.hh" +stderr: +stdout: +154. diagnostics.at:372: testing CR NL ... +151. diagnostics.at:303: ok +stderr: +@{.cc +@{.hh +stdout: +155. diagnostics.at:399: testing Screen width: 200 columns ... +stdout: +./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].cc" --header="].hh" cxx.y +./output.at:339: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@{.cc" +./output.at:337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "#.cc" --header="#.hh" cxx.y + +./diagnostics.at:372: "$PERL" -pi -e 's{\^M}{\r}g;s{\\(\d{3}|.)}{$v = $1; $v =~ /\A\d+\z/ ? chr($v) : $v}ge' input.y experr || exit 77 + +./output.at:328: ls "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.hh" +./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret --color=debug -Wall,cex input.y +stdout: +156. diagnostics.at:432: testing Screen width: 80 columns ... +./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.cc +`~!@#$%^&*()-=_+{}[]|\:;<>, .'.hh +152. diagnostics.at:328: ok +./output.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "\`~!@#\$%^&*()-=_+{}[]|\\:;<>, .'.cc" +157. diagnostics.at:465: testing Screen width: 60 columns ... +./output.at:342: ls "].cc" "].hh" +stderr: +./output.at:337: ls "#.cc" "#.hh" +./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret --color=debug -Wall,cex input.y +stdout: +stdout: +].cc +].hh +stderr: +stdout: +./output.at:335: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "(.cc" --header="(.hh" cxx.y + +#.cc +#.hh +./output.at:342: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "].cc" +stdout: +./output.at:337: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "#.cc" +./diagnostics.at:399: "$PERL" -pi -e ' s{()}{ $1 eq "" ? 
$1 : "" }ge; if (/Example/) { @@ -5214,19 +5271,7 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -stdout: -./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -@}.cc -@}.hh -151. diagnostics.at:303: ok -./output.at:340: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@}.cc" -./diagnostics.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -stderr: -153. diagnostics.at:351: ok -stdout: -156. diagnostics.at:432: testing Screen width: 80 columns ... -./output.at:342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "].cc" --header="].hh" cxx.y -./diagnostics.at:399: "$PERL" -pi -e ' +./diagnostics.at:372: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5234,29 +5279,19 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 +./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret --color=debug -Wall,cex input.y +./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.cc" --header="[.hh" cxx.y +./diagnostics.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y ./diagnostics.at:399: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=200 bison -fcaret -Wall,cex input.y - - -./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret --color=debug -Wall,cex input.y -stderr: -stdout: -157. diagnostics.at:465: testing Screen width: 60 columns ... -154. diagnostics.at:372: ok -./output.at:342: ls "].cc" "].hh" +158. diagnostics.at:504: testing Suggestions ... +./output.at:335: ls "(.cc" "(.hh" stderr: stdout: stdout: -./output.at:339: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@{.cc" --header="@{.hh" cxx.y -152. diagnostics.at:328: ok -158. diagnostics.at:504: testing Suggestions ... -./output.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "[.cc" --header="[.hh" cxx.y -].cc -].hh -155. 
diagnostics.at:399: ok -./output.at:342: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "].cc" -./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret --color=debug -Wall,cex input.y - -./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +(.cc +(.hh +./output.at:338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o "@@.cc" --header="@@.hh" cxx.y +./output.at:335: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "(.cc" ./diagnostics.at:432: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5265,15 +5300,9 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 - -./output.at:339: ls "@{.cc" "@{.hh" - +./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=80 bison -fcaret -Wall,cex input.y -./output.at:341: ls "[.cc" "[.hh" -159. diagnostics.at:527: testing Counterexamples ... -160. diagnostics.at:645: testing Deep Counterexamples ... -stdout: -./diagnostics.at:504: "$PERL" -pi -e ' +./diagnostics.at:465: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5281,16 +5310,7 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -stdout: -@{.cc -@{.hh -161. diagnostics.at:713: testing Indentation with message suppression ... -[.cc -[.hh -./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-other input.y -./output.at:339: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@{.cc" -./diagnostics.at:465: "$PERL" -pi -e ' +./diagnostics.at:504: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) { @@ -5298,11 +5318,39 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -./output.at:341: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "[.cc" -./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y ./diagnostics.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" COLUMNS=60 bison -fcaret -Wall,cex input.y +./diagnostics.at:504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +154. 
diagnostics.at:372: ok +155. diagnostics.at:399: ok 158. diagnostics.at:504: ok +./output.at:341: ls "[.cc" "[.hh" +stdout: +[.cc +[.hh +./output.at:338: ls "@@.cc" "@@.hh" +./output.at:341: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "[.cc" + +stdout: +156. diagnostics.at:432: ok +@@.cc +@@.hh +./output.at:338: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx.o -c "@@.cc" + + +157. diagnostics.at:465: ok +159. diagnostics.at:527: testing Counterexamples ... + +160. diagnostics.at:645: testing Deep Counterexamples ... +161. diagnostics.at:713: testing Indentation with message suppression ... +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-other input.y ./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y + +162. skeletons.at:25: testing Relative skeleton file names ... +./skeletons.at:27: mkdir tmp +./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret --color=debug -Wall,cex input.y +./skeletons.at:63: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret tmp/input-gram.y +163. skeletons.at:85: testing Installed skeleton file names ... +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Werror ./diagnostics.at:645: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5311,13 +5359,10 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 +./skeletons.at:120: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=yacc.c -o input-cmd-line.c input-cmd-line.y ./diagnostics.at:645: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y -156. diagnostics.at:432: ok -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Werror - -160. diagnostics.at:645: ok -157. diagnostics.at:465: ok -stderr: +./skeletons.at:64: cat input-gram.tab.c +./skeletons.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-gram.y ./diagnostics.at:527: "$PERL" -pi -e ' s{()}{ $1 eq "" ? $1 : "" }ge; if (/Example/) @@ -5326,9 +5371,9 @@ $_ = "" if $example % 2 == 0; } ' experr || exit 77 -162. skeletons.at:25: testing Relative skeleton file names ... -./skeletons.at:27: mkdir tmp +160. 
diagnostics.at:645: ok ./diagnostics.at:527: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; LC_ALL="$locale" bison -fcaret -Wall,cex input.y +stderr: input.y:2.1-12: error: deprecated directive: '%pure-parser', use '%define api.pure' [-Werror=deprecated] 2 | %pure-parser | ^~~~~~~~~~~~ @@ -5337,45 +5382,50 @@ 3 | %error-verbose | ^~~~~~~~~~~~~~ | %define parse.error verbose +./skeletons.at:69: cat input-gram.tab.c - - +./skeletons.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=tmp/skel.c tmp/input-cmd-line.y +./skeletons.at:121: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-cmd-line input-cmd-line.c $LIBS ./diagnostics.at:725: sed 's,.*/$,,' stderr 1>&2 -./skeletons.at:63: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret tmp/input-gram.y 164. skeletons.at:142: testing Boolean=variables: invalid skeleton defaults ... -./skeletons.at:155: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -163. skeletons.at:85: testing Installed skeleton file names ... -./skeletons.at:120: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=yacc.c -o input-cmd-line.c input-cmd-line.y ./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=error -165. skeletons.at:166: testing Complaining during macro argument expansion ... -./skeletons.at:64: cat input-gram.tab.c -./skeletons.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y +./skeletons.at:74: cat input-cmd-line.tab.c +./skeletons.at:155: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y 159. diagnostics.at:527: ok -./skeletons.at:68: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-gram.y -164. skeletons.at:142: ok +162. skeletons.at:25: ok -./skeletons.at:121: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-cmd-line input-cmd-line.c $LIBS -./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Wnone,none -Werror --trace=none -./skeletons.at:69: cat input-gram.tab.c -./skeletons.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --skeleton=tmp/skel.c tmp/input-cmd-line.y +164. 
skeletons.at:142: ok +./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y -Wnone,none -Werror --trace=none 166. skeletons.at:248: testing Fatal errors make M4 exit immediately ... +165. skeletons.at:166: testing Complaining during macro argument expansion ... +./skeletons.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y ./skeletons.at:262: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input1.y + 167. skeletons.at:302: testing Fatal errors but M4 continues producing output ... -./skeletons.at:74: cat input-cmd-line.tab.c ./skeletons.at:314: "$PERL" gen-skel.pl > skel.c || exit 77 -162. skeletons.at:25: ok -./skeletons.at:209: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y -./skeletons.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y ./diagnostics.at:725: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wno-other input.y --warnings=none -Werror --trace=none - -168. sets.at:27: testing Nullable ... +./skeletons.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./skeletons.at:209: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y ./skeletons.at:279: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input2.y -./sets.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y 167. skeletons.at:302: ok + +166. skeletons.at:248: 161. diagnostics.at:713: ok + ok ./skeletons.at:223: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input3.y -161. diagnostics.at:713: ok +168. sets.at:27: testing Nullable ... 
+./sets.at:42: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y +stderr: +stdout: +./skeletons.at:122: $PREPARSER ./input-cmd-line +stderr: + +syntax error, unexpected 'a', expecting end of file +./skeletons.at:122: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./skeletons.at:126: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input-gram.c input-gram.y +169. sets.at:111: testing Broken Closure ... +./sets.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y stderr: bison (GNU Bison) 3.8.2 RITEM @@ -5448,18 +5498,139 @@ State 3: rule 0: -166. skeletons.at:248: ok ./sets.at:43: sed -f extract.sed stderr - -169. sets.at:111: testing Broken Closure ... -./sets.at:125: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y -168. sets.at:27: ok 170. sets.at:153: testing Firsts ... -./sets.at:171: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y ./skeletons.at:237: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input4.y +./sets.at:171: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=sets input.y +168. sets.at:27: ok + +./skeletons.at:127: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-gram input-gram.c $LIBS +stderr: +bison (GNU Bison) 3.8.2 +RITEM + 0: exp $end (rule 0) + 3: exp '<' exp (rule 1) + 7: exp '>' exp (rule 2) + 11: exp '+' exp (rule 3) + 15: exp '-' exp (rule 4) + 19: exp '^' exp (rule 5) + 23: exp '=' exp (rule 6) + 27: "exp" (rule 7) + + +DERIVES + $accept derives + 0 exp $end + exp derives + 1 exp '<' exp + 2 exp '>' exp + 3 exp '+' exp + 4 exp '-' exp + 5 exp '^' exp + 6 exp '=' exp + 7 "exp" + + +NULLABLE + $accept: no + exp: no + + +RTC: Firsts Input BEGIN + + 01 + .--. + 0| 1| + 1| 1| + `--' +RTC: Firsts Input END + +RTC: Firsts Output BEGIN + + 01 + .--. 
+ 0|11| + 1| 1| + `--' +RTC: Firsts Output END + +FIRSTS + $accept firsts + $accept + exp + exp firsts + exp + + +FDERIVES + $accept derives + 0 exp $end + 1 exp '<' exp + 2 exp '>' exp + 3 exp '+' exp + 4 exp '-' exp + 5 exp '^' exp + 6 exp '=' exp + 7 "exp" + exp derives + 1 exp '<' exp + 2 exp '>' exp + 3 exp '+' exp + 4 exp '-' exp + 5 exp '^' exp + 6 exp '=' exp + 7 "exp" + + +relation_transpose: + 0: 1 2 3 4 5 6 + 1: 1 2 3 4 5 6 + 2: 1 2 3 4 5 6 + 3: 1 2 3 4 5 6 + 4: 1 2 3 4 5 6 + 5: 1 2 3 4 5 6 + 6: 1 2 3 4 5 6 + +relation_transpose: output: + 1: 0 1 2 3 4 5 6 + 2: 0 1 2 3 4 5 6 + 3: 0 1 2 3 4 5 6 + 4: 0 1 2 3 4 5 6 + 5: 0 1 2 3 4 5 6 + 6: 0 1 2 3 4 5 6 + +follows after includes: + FOLLOWS[goto[0] = (0, exp, 2)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[1] = (4, exp, 10)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[2] = (5, exp, 11)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[3] = (6, exp, 12)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[4] = (7, exp, 13)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[5] = (8, exp, 14)] = $end '<' '>' '+' '-' '^' '=' + FOLLOWS[goto[6] = (9, exp, 15)] = $end '<' '>' '+' '-' '^' '=' +Lookaheads: + State 1: + rule 7: + State 3: + rule 0: + State 10: + rule 1: $end '<' '>' '+' '-' '^' '=' + State 11: + rule 2: $end '<' '>' '+' '-' '^' '=' + State 12: + rule 3: $end '<' '>' '+' '-' '^' '=' + State 13: + rule 4: $end '<' '>' '+' '-' '^' '=' + State 14: + rule 5: $end '<' '>' '+' '-' '^' '=' + State 15: + rule 6: $end '<' '>' '+' '-' '^' '=' +165. skeletons.at:166: ok stderr: +./sets.at:172: sed -f extract.sed stderr +171. sets.at:228: testing Accept ... +./sets.at:240: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y bison (GNU Bison) 3.8.2 RITEM 0: a $end (rule 0) @@ -5701,155 +5872,12 @@ rule 0: ./sets.at:127: sed -n 's/[ ]*$//;/^RTC: Firsts Output BEGIN/,/^RTC: Firsts Output END/p' stderr -171. sets.at:228: testing Accept ... -./sets.at:240: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -stderr: -165. skeletons.at:166: ok -bison (GNU Bison) 3.8.2 -RITEM - 0: exp $end (rule 0) - 3: exp '<' exp (rule 1) - 7: exp '>' exp (rule 2) - 11: exp '+' exp (rule 3) - 15: exp '-' exp (rule 4) - 19: exp '^' exp (rule 5) - 23: exp '=' exp (rule 6) - 27: "exp" (rule 7) - - -DERIVES - $accept derives - 0 exp $end - exp derives - 1 exp '<' exp - 2 exp '>' exp - 3 exp '+' exp - 4 exp '-' exp - 5 exp '^' exp - 6 exp '=' exp - 7 "exp" - - -NULLABLE - $accept: no - exp: no - - -RTC: Firsts Input BEGIN - - 01 - .--. - 0| 1| - 1| 1| - `--' -RTC: Firsts Input END - -RTC: Firsts Output BEGIN - - 01 - .--. 
- 0|11| - 1| 1| - `--' -RTC: Firsts Output END - -FIRSTS - $accept firsts - $accept - exp - exp firsts - exp - - -FDERIVES - $accept derives - 0 exp $end - 1 exp '<' exp - 2 exp '>' exp - 3 exp '+' exp - 4 exp '-' exp - 5 exp '^' exp - 6 exp '=' exp - 7 "exp" - exp derives - 1 exp '<' exp - 2 exp '>' exp - 3 exp '+' exp - 4 exp '-' exp - 5 exp '^' exp - 6 exp '=' exp - 7 "exp" - - -relation_transpose: - 0: 1 2 3 4 5 6 - 1: 1 2 3 4 5 6 - 2: 1 2 3 4 5 6 - 3: 1 2 3 4 5 6 - 4: 1 2 3 4 5 6 - 5: 1 2 3 4 5 6 - 6: 1 2 3 4 5 6 - -relation_transpose: output: - 1: 0 1 2 3 4 5 6 - 2: 0 1 2 3 4 5 6 - 3: 0 1 2 3 4 5 6 - 4: 0 1 2 3 4 5 6 - 5: 0 1 2 3 4 5 6 - 6: 0 1 2 3 4 5 6 - -follows after includes: - FOLLOWS[goto[0] = (0, exp, 2)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[1] = (4, exp, 10)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[2] = (5, exp, 11)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[3] = (6, exp, 12)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[4] = (7, exp, 13)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[5] = (8, exp, 14)] = $end '<' '>' '+' '-' '^' '=' - FOLLOWS[goto[6] = (9, exp, 15)] = $end '<' '>' '+' '-' '^' '=' - -Lookaheads: - State 1: - rule 7: - State 3: - rule 0: - State 10: - rule 1: $end '<' '>' '+' '-' '^' '=' - State 11: - rule 2: $end '<' '>' '+' '-' '^' '=' - State 12: - rule 3: $end '<' '>' '+' '-' '^' '=' - State 13: - rule 4: $end '<' '>' '+' '-' '^' '=' - State 14: - rule 5: $end '<' '>' '+' '-' '^' '=' - State 15: - rule 6: $end '<' '>' '+' '-' '^' '=' - -172. sets.at:269: testing Build relations ... -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -./sets.at:172: sed -f extract.sed stderr 170. sets.at:153: ok 169. sets.at:111: ok -stderr: -stdout: - -./skeletons.at:122: $PREPARSER ./input-cmd-line -stderr: ./sets.at:243: sed -n 's/.*define YYFINAL *\([0-9][0-9]*\)/final state \1/p' input.c - -syntax error, unexpected 'a', expecting end of file -./skeletons.at:122: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -173. sets.at:315: testing Reduced Grammar ... -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y final state 6 -174. sets.at:394: testing Reduced Grammar with prec and assoc ... -stderr: -stdout: -./sets.at:412: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -./output.at:782: sed -ne 's/#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/include/ast/loc.hh ./sets.at:248: sed -n ' /^State \(.*\)/{ s//final state \1/ @@ -5861,12 +5889,26 @@ q } ' input.output -./skeletons.at:126: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input-gram.c input-gram.y -./output.at:794: sed -ne 's/^#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/x1.hh + + +172. sets.at:269: testing Build relations ... +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y +171. sets.at:228: ok +174. sets.at:394: testing Reduced Grammar with prec and assoc ... 
+./sets.at:412: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y +173. sets.at:315: testing Reduced Grammar ... +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=grammar -o input.c input.y + +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror 175. reduce.at:26: testing Useless Terminals ... ./reduce.at:47: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y -171. sets.at:228: ok -./output.at:806: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x2.cc -M out/=bar/ x2.yy +174. sets.at:394: ok +stderr: +stdout: +132. output.at:337: ok +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Werror + + stderr: input.y: error: 5 reduce/reduce conflicts [-Werror=conflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples @@ -5885,15 +5927,15 @@ input.y:2.42-45: error: rule useless in parser due to conflicts [-Werror=other] 2 | expr: term | term | term | term | term | term | ^~~~ -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Werror -./sets.at:286: sed 's,.*/$,,' stderr 1>&2 - -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./output.at:806: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x2.o out/x2.cc -174. sets.at:394: ok -./skeletons.at:127: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input-gram input-gram.c $LIBS -./reduce.at:49: sed -n '/^Grammar/q;/^$/!p' input.output +176. reduce.at:70: testing Useless Nonterminals ... +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +stderr: stderr: +stdout: +stdout: +stderr: +./sets.at:286: sed 's,.*/$,,' stderr 1>&2 +135. output.at:340: ok bison (GNU Bison) 3.8.2 input.y: error: 1 nonterminal useless in grammar [-Werror=other] input.y: error: 1 rule useless in grammar [-Werror=other] @@ -5951,85 +5993,52 @@ reduced input.y defines 7 terminals, 4 nonterminals, and 6 productions. -176. reduce.at:70: testing Useless Nonterminals ... 
-./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input.y +./reduce.at:49: sed -n '/^Grammar/q;/^$/!p' input.output +./skeletons.at:128: $PREPARSER ./input-gram +stderr: +syntax error, unexpected 'a', expecting end of file +./skeletons.at:128: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./sets.at:325: sed 's,.*/$,,' stderr 1>&2 -stdout: - -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=error -175. reduce.at:26: stderr: - ok -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -131. output.at:336: ok -stdout: 177. reduce.at:120: testing Useless Rules ... ./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -129. output.at:328: ok -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror - - -stderr: stdout: -stderr: -stdout: -stderr: +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +175. reduce.at:26: ok +./sets.at:325: sed 's,.*/$,,' stderr 1>&2 +131. output.at:336: ok +163. skeletons.at:85: ok -./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -132. output.at:337: stdout: - ok -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Wnone,none -Werror --trace=none + +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Werror +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=error 178. reduce.at:224: testing Useless Parts ... ./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y + + 179. reduce.at:312: testing Reduced Automaton ... ./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret not-reduced.y -135. output.at:340: 130. output.at:335: ok - ok -stderr: -180. reduce.at:406: testing Underivable Rules ... 
-./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y -stdout: - -stderr: -133. output.at:338: ok ./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +stderr: input.y: error: 3 nonterminals useless in grammar [-Werror=other] input.y: error: 3 rules useless in grammar [-Werror=other] input.y:11.1-8: error: nonterminal useless in grammar: useless1 [-Werror=other] input.y:12.1-8: error: nonterminal useless in grammar: useless2 [-Werror=other] input.y:13.1-8: error: nonterminal useless in grammar: useless3 [-Werror=other] - -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Werror - -181. reduce.at:452: testing Bad start symbols ... -172. sets.at:269: ok +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y -Wnone,none -Werror --trace=none +180. reduce.at:406: testing Underivable Rules ... +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.y ./reduce.at:89: sed 's,.*/$,,' stderr 1>&2 -./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=none -Werror --trace=none +181. reduce.at:452: testing Bad start symbols ... ./reduce.at:467: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y - ./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Werror -183. reduce.at:550: testing lr.type=lalr: Single State Split ... -182. reduce.at:550: testing no lr.type: Single State Split ... 
-./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error - stderr: +stdout: +./output.at:782: sed -ne 's/#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/include/ast/loc.hh +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=error +./sets.at:286: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./output.at:794: sed -ne 's/^#line [0-9][0-9]* "/#line "/p;/INCLUDED/p;/\\file/{p;n;p;}' out/x1.hh stderr: -not-reduced.y: error: 2 nonterminals useless in grammar [-Werror=other] -not-reduced.y: error: 3 rules useless in grammar [-Werror=other] -not-reduced.y:14.1-13: error: nonterminal useless in grammar: not_reachable [-Werror=other] - 14 | not_reachable: useful { /* A not reachable action. */ } - | ^~~~~~~~~~~~~ -not-reduced.y:17.1-14: error: nonterminal useless in grammar: non_productive [-Werror=other] - 17 | non_productive: non_productive useless_token - | ^~~~~~~~~~~~~~ -not-reduced.y:11.6-57: error: rule useless in grammar [-Werror=other] - 11 | | non_productive { /* A non productive action. */ } - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -184. reduce.at:550: testing lr.type=ielr: Single State Split ... input.y: error: 9 nonterminals useless in grammar [-Werror=other] input.y: error: 9 rules useless in grammar [-Werror=other] input.y:10.1-8: error: nonterminal useless in grammar: useless1 [-Werror=other] @@ -6059,29 +6068,24 @@ input.y:18.1-8: error: nonterminal useless in grammar: useless9 [-Werror=other] 18 | useless9: '9'; | ^~~~~~~~ -./reduce.at:480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: -173. sets.at:315: ok -./reduce.at:341: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:550: sed -n '/^State 0$/,$p' input.output -185. reduce.at:550: testing lr.type=canonical-lr: Single State Split ... 
+./output.at:806: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o out/x2.cc -M out/=bar/ x2.yy +./reduce.at:473: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:146: sed 's,.*/$,,' stderr 1>&2 +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Werror +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Werror +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error stderr: -stdout: input.y: error: 1 nonterminal useless in grammar [-Werror=other] input.y: error: 1 rule useless in grammar [-Werror=other] input.y:18.1-6: error: nonterminal useless in grammar: unused [-Werror=other] 18 | unused | ^~~~~~ -./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=error -./reduce.at:146: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -136. output.at:341: ok -./reduce.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./sets.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=grammar -o input.c input.y --warnings=none -Werror --trace=none +./reduce.at:480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none ./reduce.at:261: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=error - +172. 
sets.at:269: ok +stderr: stderr: input.y: error: 2 nonterminals useless in grammar [-Werror=other] input.y: error: 3 rules useless in grammar [-Werror=other] @@ -6094,291 +6098,346 @@ input.y:5.15-25: error: rule useless in grammar [-Werror=other] 5 | exp: useful | underivable; | ^~~~~~~~~~~ -./reduce.at:550: sed -n '/^State 0$/,$p' input.output -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -Wnone,none -Werror --trace=none -./reduce.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +not-reduced.y: error: 2 nonterminals useless in grammar [-Werror=other] +not-reduced.y: error: 3 rules useless in grammar [-Werror=other] +not-reduced.y:14.1-13: error: nonterminal useless in grammar: not_reachable [-Werror=other] + 14 | not_reachable: useful { /* A not reachable action. */ } + | ^~~~~~~~~~~~~ +not-reduced.y:17.1-14: error: nonterminal useless in grammar: non_productive [-Werror=other] + 17 | non_productive: non_productive useless_token + | ^~~~~~~~~~~~~~ +not-reduced.y:11.6-57: error: rule useless in grammar [-Werror=other] + 11 | | non_productive { /* A non productive action. */ } + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +./output.at:806: $CXX $CPPFLAGS $CXXFLAGS -Iout/include -c -o out/x2.o out/x2.cc +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=error stderr: stdout: -stderr: -137. output.at:342: ok - ./reduce.at:420: sed 's,.*/$,,' stderr 1>&2 -./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +./reduce.at:341: sed 's,.*/$,,' stderr 1>&2 +./reduce.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +137. output.at:342: 173. 
sets.at:315: ok + ok +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=error +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error +./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none +182. reduce.at:550: testing no lr.type: Single State Split ... +./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y + +./reduce.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y + +stderr: +./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Wnone,none -Werror --trace=none stdout: +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none +183. reduce.at:550: testing lr.type=lalr: Single State Split ... +184. reduce.at:550: testing lr.type=ielr: Single State Split ... +./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +181. reduce.at:452: 130. output.at:335: ./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y + ok + ok +./reduce.at:97: sed -n '/^Grammar/q;/^$/!p' input.output +./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Wnone,none -Werror --trace=none ./reduce.at:550: sed -n '/^State 0$/,$p' input.output -./skeletons.at:128: $PREPARSER ./input-gram -186. reduce.at:783: testing no lr.type: Lane Split ... 
-./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: -./reduce.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +./reduce.at:109: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +./reduce.at:179: sed -n '/^Grammar/q;/^$/!p' input.output + +./reduce.at:213: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=none -Werror --trace=none +./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none ./reduce.at:550: sed -n '/^State 0$/,$p' input.output -syntax error, unexpected 'a', expecting end of file -./skeletons.at:128: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y -Wnone,none -Werror --trace=none -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=error -187. reduce.at:783: testing lr.type=lalr: Lane Split ... +./reduce.at:550: sed -n '/^State 0$/,$p' input.output +185. reduce.at:550: testing lr.type=canonical-lr: Single State Split ... +186. reduce.at:783: testing no lr.type: Lane Split ... +./reduce.at:550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y - ./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -163. skeletons.at:85: ok -181. 
reduce.at:452: ok -./reduce.at:89: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y --warnings=none -Werror --trace=none -./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y -Wnone,none -Werror --trace=none -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./reduce.at:783: sed -n '/^State 0$/,$p' input.output -188. reduce.at:783: testing lr.type=ielr: Lane Split ... - -./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -rall -o input.c input.y --warnings=none -Werror --trace=none - -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:97: sed -n '/^Grammar/q;/^$/!p' input.output ./reduce.at:341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret not-reduced.y --warnings=none -Werror --trace=none -189. reduce.at:783: testing lr.type=canonical-lr: Lane Split ... -./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y -Wnone,none -Werror --trace=none -./reduce.at:783: sed -n '/^State 0$/,$p' input.output -190. reduce.at:1027: testing no lr.type: Complex Lane Split ... -./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:109: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./reduce.at:146: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:783: sed -n '/^State 0$/,$p' input.output ./reduce.at:270: sed -n '/^State 0/q;/^$/!p' input.output +stderr: +./reduce.at:550: sed -n '/^State 0$/,$p' input.output +./reduce.at:298: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +stdout: ./reduce.at:355: sed -n '/^Grammar/q;/^$/!p' not-reduced.output +176. 
reduce.at:70: ok +stderr: +./reduce.at:434: sed -n '/^Grammar/q;/^$/!p' input.output ./reduce.at:783: sed -n '/^State 0$/,$p' input.output -./reduce.at:298: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./reduce.at:420: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.y --warnings=none -Werror --trace=none +stdout: ./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:550: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./reduce.at:392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret reduced.y -./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +134. output.at:339: ok + +180. reduce.at:406: ok stderr: stdout: -./reduce.at:550: $PREPARSER ./input + stderr: -syntax error -./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:1027: sed -n '/^State 0$/,$p' input.output +129. output.at:328: ok +stdout: +187. reduce.at:783: testing lr.type=lalr: Lane Split ... +./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +177. reduce.at:120: ok +stderr: + stderr: +188. reduce.at:783: testing lr.type=ielr: Lane Split ... stdout: -./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stdout: +133. output.at:338: ok ./reduce.at:550: $PREPARSER ./input stderr: -./reduce.at:179: sed -n '/^Grammar/q;/^$/!p' input.output -182. reduce.at:550: ok -stderr: syntax error ./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./reduce.at:434: sed -n '/^Grammar/q;/^$/!p' input.output -./reduce.at:213: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -134. output.at:339: ok -./reduce.at:396: sed 's/not-reduced/reduced/g' not-reduced.c -stderr: +182. reduce.at:550: ok +./reduce.at:783: sed -n '/^State 0$/,$p' input.output +stderr: stdout: -183. reduce.at:550: 179. reduce.at:312: ok -180. reduce.at:406: ok - ok -176. reduce.at:70: ok +./reduce.at:396: sed 's/not-reduced/reduced/g' not-reduced.c +189. reduce.at:783: testing lr.type=canonical-lr: Lane Split ... +./reduce.at:783: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +178. reduce.at:224: ok +190. reduce.at:1027: testing no lr.type: Complex Lane Split ... +./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +179. reduce.at:312: ok +./reduce.at:783: sed -n '/^State 0$/,$p' input.output 191. reduce.at:1027: testing lr.type=lalr: Complex Lane Split ... +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: ./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stdout: - +136. output.at:341: ok stderr: - +192. reduce.at:1027: testing lr.type=ielr: Complex Lane Split ... 
+./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stderr: +./reduce.at:783: sed -n '/^State 0$/,$p' input.output +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: +./reduce.at:550: $PREPARSER ./input stdout: +stderr: +syntax error +./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./reduce.at:550: $PREPARSER ./input -178. reduce.at:224: ok -192. reduce.at:1027: testing lr.type=ielr: Complex Lane Split ... 193. reduce.at:1027: testing lr.type=canonical-lr: Complex Lane Split ... ./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -194. reduce.at:1296: testing no lr.type: Split During Added Lookahead Propagation ... -./reduce.at:1027: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -196. reduce.at:1296: testing lr.type=ielr: Split During Added Lookahead Propagation ... +stderr: 195. reduce.at:1296: testing lr.type=lalr: Split During Added Lookahead Propagation ... ./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +183. reduce.at:550: ok +184. reduce.at:550: ok +./reduce.at:783: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: ./reduce.at:1027: sed -n '/^State 0$/,$p' input.output -stdout: -stderr: -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./reduce.at:1027: sed -n '/^State 0$/,$p' input.output -stdout: +194. reduce.at:1296: testing no lr.type: Split During Added Lookahead Propagation ... +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: +stdout: +./reduce.at:1027: sed -n '/^State 0$/,$p' input.output ./reduce.at:783: $PREPARSER ./input stderr: ./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS syntax error -stdout: ./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:550: $PREPARSER ./input -197. reduce.at:1296: testing lr.type=canonical-lr: Split During Added Lookahead Propagation ... -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: -stderr: -./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -177. reduce.at:120: ok -stdout: stdout: -185. reduce.at:550: ok -186. 
reduce.at:783: ok ./reduce.at:550: $PREPARSER ./input -stderr: -stderr: -./reduce.at:783: $PREPARSER ./input -./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./reduce.at:1027: sed -n '/^State 0$/,$p' input.output stderr: -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror - -stdout: -./reduce.at:783: $PREPARSER ./input -184. reduce.at:550: ok -syntax error -stderr: +./reduce.at:550: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output -stderr: + +196. reduce.at:1296: testing lr.type=ielr: Split During Added Lookahead Propagation ... +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y ./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stdout: -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -./reduce.at:1027: $PREPARSER ./input -198. reduce.at:1627: testing no lr.default-reduction ... -188. reduce.at:783: ok +186. reduce.at:783: ok ./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -stdout: -199. reduce.at:1627: testing lr.default-reduction=most ... -./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: +./reduce.at:1027: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +197. reduce.at:1296: testing lr.type=canonical-lr: Split During Added Lookahead Propagation ... +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +185. reduce.at:550: ok +198. reduce.at:1627: testing no lr.default-reduction ... ./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -187. reduce.at:783: ok -syntax error -./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./reduce.at:783: $PREPARSER ./input -stderr: -./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./reduce.at:1296: sed -n '/^State 0$/,$p' input.output -190. reduce.at:1027: ok +199. reduce.at:1627: testing lr.default-reduction=most ... 
+./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output stderr: - -189. reduce.at:783: - ok +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS 200. reduce.at:1627: testing lr.default-reduction=consistent ... ./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 +stderr: +stdout: +./reduce.at:783: $PREPARSER ./input stderr: +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stderr: +syntax error +./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +187. reduce.at:783: ok +./reduce.at:783: $PREPARSER ./input +stderr: +./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 +stderr: +stderr: +stdout: +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +stdout: +./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:783: $PREPARSER ./input +stderr: -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -202. report.at:37: testing Reports ... +./reduce.at:1027: $PREPARSER ./input +stdout: +stderr: +189. reduce.at:783: ok +./reduce.at:783: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1027: $PREPARSER ./input +188. reduce.at:783: ok +stderr: +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +syntax error +./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +192. reduce.at:1027: ok 201. reduce.at:1627: testing lr.default-reduction=accepting ... 
./reduce.at:1627: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 -202. report.at:37: ./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -./reduce.at:1296: sed 's,.*/$,,' stderr 1>&2 - skipped (report.at:75) -203. report.at:3123: testing Reports with conflicts ... -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -203. report.at:3123: ./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -204. conflicts.at:28: testing Token declaration order ... -./conflicts.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +190. reduce.at:1027: + ok -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error - skipped (report.at:3132) -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +202. report.at:37: testing Reports ... +202. report.at:37: stderr: -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -206. conflicts.at:183: testing Useless associativity warning ... -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence input.y -./reduce.at:1627: sed -n '/^State 0$/,$p' input.output -205. conflicts.at:101: testing Token declaration order: literals vs. identifiers ... +stdout: +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stderr: -./conflicts.at:130: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y +203. report.at:3123: testing Reports with conflicts ... + skipped (report.at:75) stdout: -207. conflicts.at:218: testing Useless precedence warning ... -./conflicts.at:82: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1627: $PREPARSER ./input ./reduce.at:1027: $PREPARSER ./input -./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y +stderr: +204. conflicts.at:28: testing Token declaration order ... +./conflicts.at:81: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +205. conflicts.at:101: testing Token declaration order: literals vs. identifiers ... 
+./conflicts.at:130: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y stderr: syntax error ./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./reduce.at:1027: $PREPARSER ./input stderr: -stderr: -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stdout: -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Werror -./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./reduce.at:1296: $PREPARSER ./input -191. reduce.at:1027: ok -stderr: -192. reduce.at:1027: ok -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: +./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./reduce.at:1027: $PREPARSER ./input stderr: -196. reduce.at:1296: ok +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +203. report.at:3123: stdout: + skipped (report.at:3132) +199. reduce.at:1627: ok +stderr: ./reduce.at:1027: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +./reduce.at:1627: $PREPARSER ./input +191. reduce.at:1027: ok stderr: +196. reduce.at:1296: ok stderr: -input.y:2.1-9: error: useless precedence and associativity for "=" [-Werror=precedence] -input.y:4.1-5: error: useless associativity for "*", use %precedence [-Werror=precedence] -input.y:5.1-11: error: useless precedence for "(" [-Werror=precedence] -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Werror +./reduce.at:1627: sed -n '/^State 0$/,$p' input.output +./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none 193. reduce.at:1027: ok -./conflicts.at:205: sed 's,.*/$,,' stderr 1>&2 -208. conflicts.at:275: testing S/R in initial ... -stderr: ./reduce.at:1296: $PREPARSER ./input + +206. conflicts.at:183: testing Useless associativity warning ... 
stderr: +198. reduce.at:1627: ok +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence input.y ./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1627: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -209. conflicts.at:301: testing %nonassoc and eof ... -./conflicts.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:131: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +207. conflicts.at:218: testing Useless precedence warning ... +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y + + +./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none 197. reduce.at:1296: ok -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=error +209. conflicts.at:301: testing %nonassoc and eof ... stderr: -stdout: -./reduce.at:1296: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:84: $PREPARSER ./input +./conflicts.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:131: cat input.output | sed -n '/^State 0$/,/^State 1$/p' +./reduce.at:1296: sed -n '/^State 0$/,$p' input.output 210. conflicts.at:509: testing parse.error=verbose and consistent errors: lr.type=ielr ... -stderr: -205. conflicts.at:101: ok -./conflicts.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + ./conflicts.at:509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:84: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:82: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +208. conflicts.at:275: testing S/R in initial ... +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +211. conflicts.at:513: testing parse.error=verbose and consistent errors: lr.type=ielr %glr-parser ... +212. conflicts.at:518: testing parse.error=verbose and consistent errors: lr.type=ielr c++ ... +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Werror +205. 
conflicts.at:101: ok +./conflicts.at:513: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +213. conflicts.at:523: testing parse.error=verbose and consistent errors: lr.type=ielr java ... +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Werror +./conflicts.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1627: $PREPARSER ./input +./conflicts.at:523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y +stderr: ./reduce.at:1296: sed -n '/^State 0$/,$p' input.output +./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:509: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +200. reduce.at:1627: ok + +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror stderr: input.y:7.1-9: error: useless precedence and associativity for U [-Werror=precedence] 7 | %nonassoc U @@ -6392,548 +6451,531 @@ input.y:2.1-11: error: useless precedence for Z [-Werror=precedence] 2 | %precedence Z | ^~~~~~~~~~~ +./conflicts.at:513: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -204. conflicts.at:28: ok +214. conflicts.at:530: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=consistent ... ./conflicts.at:248: sed 's,.*/$,,' stderr 1>&2 -211. conflicts.at:513: testing parse.error=verbose and consistent errors: lr.type=ielr %glr-parser ... -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -stdout: -212. conflicts.at:518: testing parse.error=verbose and consistent errors: lr.type=ielr c++ ... 
-./conflicts.at:518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:513: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stdout: -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=error - -./reduce.at:1627: $PREPARSER ./input -./reduce.at:1627: $PREPARSER ./input -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -stderr: -./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Wnone,none -Werror --trace=none -stderr: -./conflicts.at:509: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +input.y:2.1-9: error: useless precedence and associativity for "=" [-Werror=precedence] +input.y:4.1-5: error: useless associativity for "*", use %precedence [-Werror=precedence] +input.y:5.1-11: error: useless precedence for "(" [-Werror=precedence] +./conflicts.at:205: sed 's,.*/$,,' stderr 1>&2 stderr: -./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -199. reduce.at:1627: ok -stdout: -./reduce.at:1627: $PREPARSER ./input -stderr: -./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -200. reduce.at:1627: ok -213. conflicts.at:523: testing parse.error=verbose and consistent errors: lr.type=ielr java ... -stderr: -./conflicts.at:523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other] ./conflicts.at:518: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -198. reduce.at:1627: ok -214. conflicts.at:530: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=consistent ... 
-./conflicts.at:530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - - -./reduce.at:1296: sed -n '/^State 0$/,$p' input.output -./conflicts.at:284: sed 's,.*/$,,' stderr 1>&2 - -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -./reduce.at:1296: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=error 215. conflicts.at:535: testing parse.error=verbose and consistent errors: lr.type=ielr lr.default-reduction=accepting ... ./conflicts.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:513: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -216. conflicts.at:540: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... -./conflicts.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -213. conflicts.at:523: ./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -stderr: +213. conflicts.at:523: stderr: +./conflicts.at:284: sed 's,.*/$,,' stderr 1>&2 +stdout: skipped (conflicts.at:523) -217. conflicts.at:546: testing parse.error=verbose and consistent errors: lr.type=canonical-lr parse.lac=full ... 
-./conflicts.at:546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=error +stderr: stdout: -./conflicts.at:530: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=none -Werror --trace=none +./conflicts.at:84: $PREPARSER ./input ./reduce.at:1627: $PREPARSER ./input -./conflicts.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=none -Werror --trace=none +stderr: stderr: -./conflicts.at:540: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:84: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:530: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error ./reduce.at:1627: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +204. conflicts.at:28: ok +216. conflicts.at:540: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... +./conflicts.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 201. reduce.at:1627: ok -./conflicts.at:546: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + + +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y -Wnone,none -Werror --trace=none 218. conflicts.at:551: testing parse.error=verbose and consistent errors: lr.type=ielr parse.lac=full ... +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y -Wnone,none -Werror --trace=none +217. conflicts.at:546: testing parse.error=verbose and consistent errors: lr.type=canonical-lr parse.lac=full ... ./conflicts.at:551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none - -206. 
conflicts.at:183: ok -./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:540: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -stderr: +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:248: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence -fcaret -o input.c input.y --warnings=none -Werror --trace=none ./reduce.at:1296: $PREPARSER ./input -219. conflicts.at:558: testing parse.error=verbose and consistent errors: c++ lr.type=canonical-lr parse.lac=full ... -./conflicts.at:558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stdout: +stderr: +./conflicts.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wprecedence input.y --warnings=none -Werror --trace=none ./conflicts.at:551: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -./conflicts.at:368: $PREPARSER ./input '0<0' -207. conflicts.at:218: ok - +stdout: syntax error +./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./reduce.at:1296: $PREPARSER ./input stderr: +syntax error ./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y -./conflicts.at:368: $PREPARSER ./input '0<0<0' +./conflicts.at:546: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +195. reduce.at:1296: ok +194. reduce.at:1296: ok +./conflicts.at:284: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +206. conflicts.at:183: ok -stderr: +207. conflicts.at:218: ok + + +219. conflicts.at:558: testing parse.error=verbose and consistent errors: c++ lr.type=canonical-lr parse.lac=full ... + +./conflicts.at:558: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y 220. conflicts.at:564: testing parse.error=verbose and consistent errors: c++ lr.type=ielr parse.lac=full ... 
./conflicts.at:564: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +221. conflicts.at:622: testing parse.error=verbose and consistent errors: ... +./conflicts.at:622: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -./conflicts.at:558: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stdout: +222. conflicts.at:626: testing parse.error=verbose and consistent errors: %glr-parser ... +./conflicts.at:368: $PREPARSER ./input '0<0' +./conflicts.at:626: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:368: $PREPARSER ./input '0<0<0' +stderr: +./conflicts.at:564: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS syntax error, unexpected '<' ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -195. reduce.at:1296: ok -./reduce.at:1296: $PREPARSER ./input +./conflicts.at:558: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror ./conflicts.at:368: $PREPARSER ./input '0>0' +./conflicts.at:622: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -syntax error -./reduce.at:1296: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -221. conflicts.at:622: testing parse.error=verbose and consistent errors: ... -./conflicts.at:622: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:626: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./conflicts.at:368: $PREPARSER ./input '0>0>0' - stderr: syntax error, unexpected '>' ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:564: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -194. reduce.at:1296: ok -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror -stdout: -stderr: ./conflicts.at:368: $PREPARSER ./input '0<0>0' -./conflicts.at:509: $PREPARSER ./input -stdout: -222. conflicts.at:626: testing parse.error=verbose and consistent errors: %glr-parser ... 
stderr: -stderr: -syntax error, unexpected end of file -./conflicts.at:626: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:509: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error, unexpected '>' ./conflicts.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./output.at:835: $CXX $CPPFLAGS $CXXFLAGS -Iout/ $LDFLAGS -o parser out/x[12].o main.cc $LIBS -./conflicts.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.default-reduction=consistent -o input.c input.y -./conflicts.at:622: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -210. conflicts.at:509: ok stderr: +stderr: +stderr: +stdout: input.y:4.10-15: error: rule useless in parser due to conflicts [-Werror=other] 4 | e: 'e' | %empty; | ^~~~~~ -223. conflicts.at:632: testing parse.error=verbose and consistent errors: lr.default-reduction=consistent ... -./conflicts.at:632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:372: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:288: sed 's,.*/$,,' stderr 1>&2 - +./conflicts.at:509: $PREPARSER ./input +stdout: stderr: -./conflicts.at:626: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:530: $PREPARSER ./input +./conflicts.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.default-reduction=consistent -o input.c input.y +syntax error, unexpected end of file +stderr: +./conflicts.at:509: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:288: sed 's,.*/$,,' stderr 1>&2 +syntax error, unexpected end of file, expecting 'a' or 'b' +./conflicts.at:530: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +210. conflicts.at:509: ok +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +214. conflicts.at:530: ok stderr: stdout: +stderr: + stdout: -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +./conflicts.at:372: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + ./conflicts.at:535: $PREPARSER ./input -./conflicts.at:530: $PREPARSER ./input -stderr: -224. conflicts.at:638: testing parse.error=verbose and consistent errors: lr.default-reduction=accepting ... +./output.at:835: $CXX $CPPFLAGS $CXXFLAGS -Iout/ $LDFLAGS -o parser out/x[12].o main.cc $LIBS stderr: syntax error, unexpected end of file, expecting 'a' or 'b' -./conflicts.at:530: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:535: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +224. conflicts.at:638: testing parse.error=verbose and consistent errors: lr.default-reduction=accepting ... ./conflicts.at:638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -syntax error, unexpected end of file, expecting 'a' or 'b' +223. 
conflicts.at:632: testing parse.error=verbose and consistent errors: lr.default-reduction=consistent ... +./conflicts.at:632: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +215. conflicts.at:535: ok + +./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none stderr: -./conflicts.at:535: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:638: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stdout: +225. conflicts.at:642: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... +./conflicts.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./conflicts.at:540: $PREPARSER ./input -./conflicts.at:632: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: syntax error, unexpected end of file, expecting 'a' or 'b' +./conflicts.at:632: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./conflicts.at:540: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -214. conflicts.at:530: ok -215. conflicts.at:535: ok 216. conflicts.at:540: ok -./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none - -./conflicts.at:638: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +./conflicts.at:551: $PREPARSER ./input +stderr: +syntax error, unexpected end of file, expecting 'b' +./conflicts.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:642: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +208. conflicts.at:275: ok +218. conflicts.at:551: ok stderr: +226. conflicts.at:647: testing parse.error=verbose and consistent errors: parse.lac=full ... +./conflicts.at:647: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stdout: ./conflicts.at:546: $PREPARSER ./input stderr: -225. conflicts.at:642: testing parse.error=verbose and consistent errors: lr.type=canonical-lr ... -./conflicts.at:642: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + syntax error, unexpected end of file, expecting 'b' ./conflicts.at:546: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -226. conflicts.at:647: testing parse.error=verbose and consistent errors: parse.lac=full ... -./conflicts.at:647: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stdout: 227. conflicts.at:651: testing parse.error=verbose and consistent errors: parse.lac=full lr.default-reduction=accepting ... 
-./conflicts.at:288: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:551: $PREPARSER ./input -stderr: -217. conflicts.at:546: ok ./conflicts.at:651: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -syntax error, unexpected end of file, expecting 'b' -./conflicts.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -218. conflicts.at:551: ok - -./conflicts.at:647: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:642: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - +217. conflicts.at:546: ok 228. conflicts.at:676: testing LAC: %nonassoc requires splitting canonical LR states ... ./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y +./conflicts.at:647: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + 229. conflicts.at:764: testing Unresolved SR Conflicts ... -./conflicts.at:651: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y -208. conflicts.at:275: ok +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Werror +./conflicts.at:651: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror stderr: +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples stderr: stdout: -./conflicts.at:372: $PREPARSER ./input '0<0' stderr: - -./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:372: $PREPARSER ./input '0<0' stdout: +stderr: +./conflicts.at:726: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:622: $PREPARSER ./input -./conflicts.at:372: $PREPARSER ./input '0<0<0' -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror +./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./conflicts.at:372: $PREPARSER ./input '0<0<0' +syntax error, unexpected 'b' stderr: +./conflicts.at:622: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=error syntax error, unexpected '<', expecting end of file ./conflicts.at:372: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected 'b' -230. conflicts.at:887: testing Resolved SR Conflicts ... -./conflicts.at:898: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y -./conflicts.at:622: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Werror +stderr: ./conflicts.at:372: $PREPARSER ./input '0>0' +221. conflicts.at:622: ok +input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples stderr: ./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:774: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:372: $PREPARSER ./input '0>0>0' stderr: -221. conflicts.at:622: ok syntax error, unexpected '>', expecting end of file ./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stdout: -input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples + +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error ./conflicts.at:372: $PREPARSER ./input '0<0>0' +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Wnone,none -Werror --trace=none stderr: syntax error, unexpected '>', expecting end of file ./conflicts.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:901: cat input.output +230. conflicts.at:887: testing Resolved SR Conflicts ... +./conflicts.at:898: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y +./conflicts.at:381: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y +stderr: +stdout: ./conflicts.at:632: $PREPARSER ./input -230. 
conflicts.at:887: ok +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none stderr: syntax error, unexpected 'b' ./conflicts.at:632: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:381: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -./conflicts.at:774: sed 's,.*/$,,' stderr 1>&2 - - -stderr: -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error -232. conflicts.at:1015: testing %precedence does not suffice ... -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -223. conflicts.at:632: ok -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -231. conflicts.at:989: testing %precedence suffices ... +./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:901: cat input.output ./conflicts.at:381: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:1006: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:726: sed 's,.*/$,,' stderr 1>&2 -stderr: +223. conflicts.at:632: ok stderr: stdout: +230. conflicts.at:887: ok +stderr: ./conflicts.at:513: $PREPARSER ./input - -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=error +stdout: stderr: +./conflicts.at:642: $PREPARSER ./input syntax error, unexpected end of file +./conflicts.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected end of file, expecting 'a' +./conflicts.at:642: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + +./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none +211. conflicts.at:513: ok +225. conflicts.at:642: ok +./conflicts.at:731: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +231. conflicts.at:989: testing %precedence suffices ... +stderr: +./conflicts.at:1006: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stdout: ./conflicts.at:638: $PREPARSER ./input -./conflicts.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +232. 
conflicts.at:1015: testing %precedence does not suffice ... +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + stderr: syntax error, unexpected end of file, expecting 'a' ./conflicts.at:638: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -211. conflicts.at:513: ok + +224. conflicts.at:638: ok 233. conflicts.at:1096: testing Syntax error in consistent error state: yacc.c ... +234. conflicts.at:1096: testing Syntax error in consistent error state: glr.c ... +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror 231. conflicts.at:989: ok -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none -224. conflicts.at:638: ok -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./conflicts.at:780: cat input.output +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +229. conflicts.at:764: ok stderr: -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y -Wnone,none -Werror --trace=none -234. conflicts.at:1096: testing Syntax error in consistent error state: glr.c ... +stdout: +235. conflicts.at:1096: testing Syntax error in consistent error state: lalr1.cc ... +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./conflicts.at:651: $PREPARSER ./input +stderr: +syntax error, unexpected end of file +./conflicts.at:651: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +236. conflicts.at:1096: testing Syntax error in consistent error state: glr.cc ... +227. 
conflicts.at:651: ok +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:12.3-18: error: rule useless in parser due to conflicts [-Werror=other] -235. conflicts.at:1096: testing Syntax error in consistent error state: lalr1.cc ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:774: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none +237. conflicts.at:1096: testing Syntax error in consistent error state: glr2.cc ... ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y - +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror ./conflicts.at:1033: sed 's,.*/$,,' stderr 1>&2 + stderr: input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:726: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -o input.c input.y --warnings=none -Werror --trace=none ./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -236. conflicts.at:1096: testing Syntax error in consistent error state: glr.cc ... -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:780: cat input.output ./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 stderr: +input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] +input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] +238. conflicts.at:1127: testing Defaulted Conflicted Reduction ... +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -229. 
conflicts.at:764: ok +stderr: stdout: -./conflicts.at:642: $PREPARSER ./input +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror +./conflicts.at:626: $PREPARSER ./input stderr: +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror stderr: -syntax error, unexpected end of file, expecting 'a' -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./conflicts.at:642: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: +input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] +input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror +syntax error, unexpected 'b' +./conflicts.at:626: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:647: $PREPARSER ./input +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +222. conflicts.at:626: ok +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error +stderr: ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +stdout: stderr: +./conflicts.at:647: $PREPARSER ./input +input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] +input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -225. conflicts.at:642: ok +stderr: syntax error, unexpected 'b' ./conflicts.at:647: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:731: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -226. conflicts.at:647: ok -237. 
conflicts.at:1096: testing Syntax error in consistent error state: glr2.cc ... +./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror -input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] -input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] - ./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:4.6-8: error: rule useless in parser due to conflicts [-Werror=other] +./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +226. conflicts.at:647: ok +239. conflicts.at:1264: testing %expect not enough ... 
+./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error stderr: -stderr: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./conflicts.at:1273: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1138: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none stdout: +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +stderr: +./conflicts.at:518: $PREPARSER ./input stdout: +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error ./conflicts.at:381: $PREPARSER ./input '0<0' -./conflicts.at:1033: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 stderr: -./conflicts.at:651: $PREPARSER ./input - -./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +239. conflicts.at:1264: ok +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error stderr: -238. conflicts.at:1127: testing Defaulted Conflicted Reduction ... 
-./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c --report=all input.y syntax error, unexpected end of file -./conflicts.at:651: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:518: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./conflicts.at:381: $PREPARSER ./input '0<0<0' stderr: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error +212. conflicts.at:518: ok syntax error, unexpected '<', expecting end of file ./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -227. conflicts.at:651: ok -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Werror -239. conflicts.at:1264: testing %expect not enough ... -./conflicts.at:1273: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none stderr: -input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] -input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] +232. conflicts.at:1015: ok +stdout: + +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none +./conflicts.at:732: $PREPARSER ./input ./conflicts.at:381: $PREPARSER ./input '0>0' -239. conflicts.at:1264: ok stderr: -232. conflicts.at:1015: ok +stderr: +240. conflicts.at:1284: testing %expect right ... 
+./conflicts.at:1293: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +syntax error, unexpected 'a', expecting 'b' +./conflicts.at:732: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y + ./conflicts.at:381: $PREPARSER ./input '0>0>0' -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Werror stderr: +241. conflicts.at:1301: testing %expect too much ... +./conflicts.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none syntax error, unexpected '>', expecting end of file -stderr: ./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./conflicts.at:626: $PREPARSER ./input -stderr: -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error -stdout: ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none -./output.at:836: $PREPARSER ./parser +./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./conflicts.at:381: $PREPARSER ./input '0<0>0' stderr: - -240. conflicts.at:1284: testing %expect right ... -./conflicts.at:1293: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -syntax error, unexpected 'b' -./conflicts.at:626: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -./output.at:836: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +241. conflicts.at:1301: ok syntax error, unexpected '>', expecting end of file ./conflicts.at:381: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input.y:17.5-25: error: rule useless in parser due to conflicts [-Werror=other] -input.y:18.5-29: error: rule useless in parser due to conflicts [-Werror=other] -144. 
output.at:744: ok -./conflicts.at:388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.c input.y -222. conflicts.at:626: ok -241. conflicts.at:1301: testing %expect too much ... -./conflicts.at:1096: sed 's,.*/$,,' stderr 1>&2 +240. conflicts.at:1284: ok 242. conflicts.at:1321: testing %expect with reduce conflicts ... -./conflicts.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -stderr: - -./conflicts.at:1330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:4.6-8: error: rule useless in parser due to conflicts [-Werror=other] ./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none -240. conflicts.at:1284: ok -241. conflicts.at:1301: ok -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=error - -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none -./conflicts.at:1138: sed 's,.*/$,,' stderr 1>&2 -242. conflicts.at:1321: ok -./conflicts.at:388: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - +./conflicts.at:1330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y 243. conflicts.at:1341: testing %expect in grammar rule not enough ... ./conflicts.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.c input.y +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y -Werror + +242. conflicts.at:1321: ok +243. 
conflicts.at:1341: ok +./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none 244. conflicts.at:1360: testing %expect in grammar rule right ... ./conflicts.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=error -243. conflicts.at:1341: ok -./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -246. conflicts.at:1396: testing %expect in grammar rule too much ... -./conflicts.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none + +./conflicts.at:388: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:1096: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 245. conflicts.at:1377: testing %expect in grammar rules ... ./conflicts.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -rall input.y +stderr: +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +246. conflicts.at:1396: testing %expect in grammar rule too much ... +./conflicts.at:737: sed 's,.*/$,,' stderr 1>&2 247. conflicts.at:1415: testing %expect-rr in grammar rule ... +./conflicts.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y ./conflicts.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none -./conflicts.at:1096: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +244. conflicts.at:1360: ok 246. 
conflicts.at:1396: ok -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y -Wnone,none -Werror --trace=none +./conflicts.at:1145: cat input.output +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y --warnings=error +238. conflicts.at:1127: ok +247. conflicts.at:1415: ok + +./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +245. conflicts.at:1377: ok 248. conflicts.at:1440: testing %expect-rr too much in grammar rule ... + ./conflicts.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y -Wnone,none -Werror --trace=none -245. conflicts.at:1377: ok -244. conflicts.at:1360: ok -stderr: -247. conflicts.at:1415: ok -./conflicts.at:1096: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -248. conflicts.at:1440: ok -./conflicts.at:732: $PREPARSER ./input 249. conflicts.at:1469: testing %expect-rr not enough in grammar rule ... ./conflicts.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -stderr: - -syntax error, unexpected 'a', expecting 'b' -./conflicts.at:732: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - -./conflicts.at:1138: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c --report=all input.y --warnings=none -Werror --trace=none 250. conflicts.at:1498: testing %prec with user string ... ./conflicts.at:1507: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -249. conflicts.at:1469: ok -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y +248. conflicts.at:1440: ok +249. conflicts.at:1469: ok 251. conflicts.at:1515: testing %no-default-prec without %prec ... ./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y -Wnone,none -Werror --trace=none + 252. conflicts.at:1544: testing %no-default-prec with %prec ... 
./conflicts.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1096: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.cc input.y --warnings=none -Werror --trace=none + +250. conflicts.at:1498: ok 253. conflicts.at:1568: testing %default-prec ... ./conflicts.at:1584: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:1145: cat input.output -250. conflicts.at:1498: ok - -238. conflicts.at:1127: ok ./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror -stderr: -stdout: -252. conflicts.at:1544: ok -./conflicts.at:1096: $PREPARSER ./input 254. conflicts.at:1592: testing Unreachable States After Conflict Resolution ... -stderr: - -syntax error ./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y -253. conflicts.at:1568: ok -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./conflicts.at:1096: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -233. conflicts.at:1096: ok +252. conflicts.at:1544: ok +./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ + -o input.c input.y --warnings=none -Werror --trace=none 255. conflicts.at:1855: testing Solved conflicts report for multiple reductions in a state ... ./conflicts.at:1881: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all -o input.c input.y -stderr: +253. conflicts.at:1568: ok +./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Werror 256. conflicts.at:1935: testing %nonassoc error actions for multiple reductions in a state ... ./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y +stderr: input.y: error: 4 shift/reduce conflicts [-Werror=conflicts-sr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples input.y:1.1-5: error: useless precedence and associativity for '+' [-Werror=precedence] input.y:2.1-5: error: useless precedence and associativity for '*' [-Werror=precedence] -257. conflicts.at:2299: testing %expect-rr non GLR ... 
-./conflicts.at:1531: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Werror -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 1.y -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y -Werror -258. conflicts.at:2331: testing -W versus %expect and %expect-rr ... stderr: -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret sr-rr.y +stdout: +./output.at:836: $PREPARSER ./parser +stderr: +./conflicts.at:1531: sed 's,.*/$,,' stderr 1>&2 +./output.at:836: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:742: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +144. output.at:744: ok +257. conflicts.at:2299: testing %expect-rr non GLR ... +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 1.y ./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error +stderr: +stderr: input.y:7.5-7: warning: rule useless in parser due to conflicts [-Wother] input.y:11.5-7: warning: rule useless in parser due to conflicts [-Wother] input.y:17.11-26: warning: rule useless in parser due to conflicts [-Wother] input.y:18.11-26: warning: rule useless in parser due to conflicts [-Wother] input.y:19.11-26: warning: rule useless in parser due to conflicts [-Wother] ./conflicts.at:1882: cat input.output | sed -n '/^State 0$/,/^State 1$/p' -259. counterexample.at:43: testing Unifying S/R ... -./counterexample.at:55: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Werror -stderr: -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Werror input.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] input.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples @@ -6944,29 +6986,29 @@ input.y:25.16: error: rule useless in parser due to conflicts [-Werror=other] input.y:31.5-7: error: rule useless in parser due to conflicts [-Werror=other] input.y:32.4: error: rule useless in parser due to conflicts [-Werror=other] -255. 
conflicts.at:1855: ok +stderr: +stdout: +./conflicts.at:1096: $PREPARSER ./input + ./conflicts.at:1638: sed 's,.*/$,,' stderr 1>&2 stderr: -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Werror -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +255. conflicts.at:1855: ok +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Werror +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Werror +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=error +233. conflicts.at:1096: ok +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none +258. conflicts.at:2331: testing -W versus %expect and %expect-rr ... +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret sr-rr.y stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example: A . B C - Shift derivation - s - `-> 2: y c - `-> 8: A . B `-> 4: C - Reduce derivation - s - `-> 1: a x - `-> 3: A . `-> 6: B C -input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +1.y: error: %expect-rr applies only to GLR parsers [-Werror=other] + +259. counterexample.at:43: testing Unifying S/R ... +./counterexample.at:55: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: -./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=error bison (GNU Bison) 3.8.2 init: 0.000000 # state items: 26 @@ -7245,48 +7287,56 @@ `-> 13: %empty . 
-./counterexample.at:55: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:737: sed 's,.*/$,,' stderr 1>&2 -stderr: -1.y: error: %expect-rr applies only to GLR parsers [-Werror=other] stderr: -./conflicts.at:2307: sed 's,.*/$,,' stderr 1>&2 -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none stdout: +./conflicts.at:2307: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:388: $PREPARSER ./input '0<0' +./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Wnone,none -Werror --trace=none +stderr: +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1959: sed 's,.*/$,,' stderr 1>&2 260. counterexample.at:83: testing Deep Unifying S/R ... +./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none ./counterexample.at:95: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y --warnings=error -./conflicts.at:518: $PREPARSER ./input -./conflicts.at:1959: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:388: $PREPARSER ./input '0<0<0' stderr: -syntax error, unexpected end of file -./conflicts.at:518: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=error +syntax error, unexpected '<', expecting end of file +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=error stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example A . B C - Shift derivation s -> [ y -> [ A . B ] c -> [ C ] ] - Reduce derivation s -> [ a -> [ A . ] x -> [ B C ] ] + Example: A . B C + Shift derivation + s + `-> 2: y c + `-> 8: A . B `-> 4: C + Reduce derivation + s + `-> 1: a x + `-> 3: A . 
`-> 6: B C input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=error -./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y -Wnone,none -Werror --trace=none -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=error -stderr: -sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] -sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./conflicts.at:388: $PREPARSER ./input '0>0' stderr: -./conflicts.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none -stdout: -212. conflicts.at:518: 259. counterexample.at:43: ok - ok -./conflicts.at:388: $PREPARSER ./input '0<0' +./counterexample.at:55: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:388: $PREPARSER ./input '0>0>0' +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Werror stderr: -./conflicts.at:2354: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=none -Werror --trace=none +251. 
conflicts.at:1515: ok +syntax error, unexpected '>', expecting end of file +./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:388: $PREPARSER ./input '0<0>0' +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Wnone,none -Werror --trace=none stderr: +syntax error, unexpected '>', expecting end of file +./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Wnone,none -Werror --trace=none ./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] Example: A . B C @@ -7317,84 +7367,37 @@ `-> 7: A . `-> 10: B C input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:388: $PREPARSER ./input '0<0<0' -stderr: - +209. conflicts.at:301: ok ./counterexample.at:95: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -syntax error, unexpected '<', expecting end of file -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y -Wnone,none -Werror --trace=none -./conflicts.at:1638: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --report=all input.y --warnings=none -Werror --trace=none -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=error -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y -Wnone,none -Werror --trace=none - -./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y -Wnone,none -Werror --trace=none -./conflicts.at:388: $PREPARSER ./input '0>0' -251. conflicts.at:1515: ok -stderr: -261. counterexample.at:144: testing S/R Conflict with Nullable Symbols ... 
-./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./counterexample.at:157: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -262. counterexample.at:207: testing Non-unifying Ambiguous S/R ... -./counterexample.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:737: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=canonical-lr -Dparse.lac=full \ - -o input.c input.y --warnings=none -Werror --trace=none -./conflicts.at:388: $PREPARSER ./input '0>0>0' stderr: -./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=none -Werror --trace=none input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] Example A . B C - Shift derivation s -> [ ac -> [ A ac -> [ b -> [ . B ] ] C ] ] - Reduce derivation s -> [ a -> [ A . ] bc -> [ B C ] ] -input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] - Example A A . B B C C - Shift derivation s -> [ ac -> [ A ac -> [ A ac -> [ b -> [ . b -> [ B B ] ] ] C ] C ] ] - Reduce derivation s -> [ a -> [ A a -> [ A . ] ] bc -> [ B bc -> [ B C ] C ] ] -input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr + Shift derivation s -> [ y -> [ A . B ] c -> [ C ] ] + Reduce derivation s -> [ a -> [ A . ] x -> [ B C ] ] +input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:55: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr stderr: +sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] +sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples ./conflicts.at:1651: cat input.output -syntax error, unexpected '>', expecting end of file -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./conflicts.at:2307: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 1.y --warnings=none -Werror --trace=none +259. counterexample.at:43: ok -260. counterexample.at:83: ok -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Wnone,none -Werror --trace=none -./conflicts.at:388: $PREPARSER ./input '0<0>0' +261. counterexample.at:144: testing S/R Conflict with Nullable Symbols ... 
+./counterexample.at:157: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:2354: sed 's,.*/$,,' stderr 1>&2 ./conflicts.at:1836: cat input.y >> input-keep.y -stderr: -syntax error, unexpected '>', expecting end of file -./conflicts.at:388: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +262. counterexample.at:207: testing Non-unifying Ambiguous S/R ... ./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret input-keep.y -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token C [-Wcounterexamples] - First example: B . C $end - Shift derivation - $accept - `-> 0: g $end - `-> 2: x - `-> 6: bc - `-> 9: B . C - Second example: B . C D $end - Reduce derivation - $accept - `-> 0: g $end - `-> 2: x - `-> 5: b cd - `-> 7: B . `-> 8: C D -input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] -209. conflicts.at:301: ok -./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -263. counterexample.at:254: testing Non-unifying Unambiguous S/R ... ./conflicts.at:1959: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --trace=cex -fcaret --report=all -o input.c input.y --warnings=none -Werror --trace=none -./counterexample.at:265: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=error +./counterexample.at:220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y ./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 2.y stderr: -./counterexample.at:220: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] Example: A . B @@ -7424,57 +7427,48 @@ `-> 5: X x `-> 4: %empty . input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] -264. counterexample.at:298: testing S/R after first token ... +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example A . B C + Shift derivation s -> [ ac -> [ A ac -> [ b -> [ . B ] ] C ] ] + Reduce derivation s -> [ a -> [ A . ] bc -> [ B C ] ] +input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] + Example A A . B B C C + Shift derivation s -> [ ac -> [ A ac -> [ A ac -> [ b -> [ . b -> [ B B ] ] ] C ] C ] ] + Reduce derivation s -> [ a -> [ A a -> [ A . 
] ] bc -> [ B bc -> [ B C ] C ] ] +input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:95: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr ./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:314: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=none -Werror --trace=none -265. counterexample.at:363: testing Unifying R/R counterexample ... +263. counterexample.at:254: testing Non-unifying Unambiguous S/R ... +./counterexample.at:265: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y ./counterexample.at:157: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +260. counterexample.at:83: ok ./conflicts.at:2239: cat input.output | sed -n '/^State 0$/,/^State 1$/p' -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Werror stderr: -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Werror -./conflicts.at:742: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Werror input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token C [-Wcounterexamples] - First example B . C $end - Shift derivation $accept -> [ g -> [ x -> [ bc -> [ B . C ] ] ] $end ] - Second example B . C D $end - Reduce derivation $accept -> [ g -> [ x -> [ b -> [ B . ] cd -> [ C D ] ] ] $end ] -input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -stderr: -stderr: -input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - Example: b . A X X Y - Shift derivation - a - `-> 2: s - `-> 7: b . xx y - `-> 9: A X X `-> 11: Y - Reduce derivation - a - `-> 1: r t - `-> 3: b . `-> 6: A x xy - `-> 8: X `-> 10: X Y -input.y: warning: shift/reduce conflict on token X [-Wcounterexamples] - First example: A X . X + First example: B . C $end Shift derivation - a - `-> 1: t - `-> 5: A xx - `-> 9: X . X - Second example: X . X xy + $accept + `-> 0: g $end + `-> 2: x + `-> 6: bc + `-> 9: B . C + Second example: B . C D $end Reduce derivation - a - `-> 1: x t - `-> 8: X . 
`-> 6: X xy -input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] -input.y:8.4: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr + $accept + `-> 0: g $end + `-> 2: x + `-> 5: b cd + `-> 7: B . `-> 8: C D +input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./counterexample.at:220: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y + 256. conflicts.at:1935: ok +stderr: +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Werror input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] First example: A . A B $end @@ -7492,11 +7486,13 @@ `-> 1: t `-> 3: x `-> 3: x `-> 5: A `-> 5: A . +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y -Wnone,none -Werror --trace=none ./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -./counterexample.at:314: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y + +264. counterexample.at:298: testing S/R after first token ... +./counterexample.at:314: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./counterexample.at:265: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: -262. counterexample.at:207: ok input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] Example A . B @@ -7509,23 +7505,13 @@ Reduce derivation $accept -> [ s -> [ ax -> [ A x -> [ X x -> [ . ] ] ] by -> [ B y ] ] $end ] input.y:5.4-9: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:157: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./counterexample.at:265: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example: A b . - First reduce derivation - a - `-> 1: A b . - Second reduce derivation - a - `-> 1: A b - `-> 3: b . -input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -261. 
counterexample.at:144: ok stderr: stderr: +2.y: error: %expect-rr applies only to GLR parsers [-Werror=other] +2.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +2.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +2.y:3.12-14: error: rule useless in parser due to conflicts [-Werror=other] +261. counterexample.at:144: ok input-keep.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] input-keep.y: error: 2 reduce/reduce conflicts [-Werror=conflicts-rr] input-keep.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples @@ -7533,21 +7519,58 @@ input-keep.y:26.16: error: rule useless in parser due to conflicts [-Werror=other] input-keep.y:32.5-7: error: rule useless in parser due to conflicts [-Werror=other] input-keep.y:33.4: error: rule useless in parser due to conflicts [-Werror=other] -2.y: error: %expect-rr applies only to GLR parsers [-Werror=other] -2.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -2.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -2.y:3.12-14: error: rule useless in parser due to conflicts [-Werror=other] - -./counterexample.at:372: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +265. counterexample.at:363: testing Unifying R/R counterexample ... +./counterexample.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +./conflicts.at:2354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret sr-rr.y --warnings=none -Werror --trace=none +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token C [-Wcounterexamples] + First example B . C $end + Shift derivation $accept -> [ g -> [ x -> [ bc -> [ B . C ] ] ] $end ] + Second example B . C D $end + Reduce derivation $accept -> [ g -> [ x -> [ b -> [ B . ] cd -> [ C D ] ] ] $end ] +input.y:6.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:220: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr ./conflicts.at:2317: sed 's,.*/$,,' stderr 1>&2 +stderr: ./conflicts.at:1838: sed 's,.*/$,,' stderr 1>&2 +stdout: +262. counterexample.at:207: ok +./conflicts.at:743: $PREPARSER ./input +stderr: +input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + Example: b . A X X Y + Shift derivation + a + `-> 2: s + `-> 7: b . xx y + `-> 9: A X X `-> 11: Y + Reduce derivation + a + `-> 1: r t + `-> 3: b . `-> 6: A x xy + `-> 8: X `-> 10: X Y +input.y: warning: shift/reduce conflict on token X [-Wcounterexamples] + First example: A X . X + Shift derivation + a + `-> 1: t + `-> 5: A xx + `-> 9: X . X + Second example: X . X xy + Reduce derivation + a + `-> 1: x t + `-> 8: X . 
`-> 6: X xy +input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] +input.y:8.4: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +stderr: -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=error - -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=error -266. counterexample.at:399: testing Non-unifying R/R LR(1) conflict ... stderr: -./counterexample.at:409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +syntax error, unexpected 'a', expecting 'b' or 'c' +./conflicts.at:743: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] First example A . A B $end @@ -7555,8 +7578,48 @@ Second example A . A $end Reduce derivation $accept -> [ s -> [ s -> [ t -> [ x -> [ A . ] ] ] t -> [ x -> [ A ] ] ] $end ] ./counterexample.at:265: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=error +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=error +./counterexample.at:314: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +263. counterexample.at:254: ok +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y + stderr: +input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example: A b . + First reduce derivation + a + `-> 1: A b . + Second reduce derivation + a + `-> 1: A b + `-> 3: b . +input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +266. counterexample.at:399: testing Non-unifying R/R LR(1) conflict ... +./counterexample.at:409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y +./counterexample.at:372: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y 267. 
counterexample.at:441: testing Non-unifying R/R LR(2) conflict ... +./counterexample.at:451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y + +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Wnone,none -Werror --trace=none +stderr: +input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example A b . + First reduce derivation a -> [ A b . ] + Second reduce derivation a -> [ A b -> [ b . ] ] +input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Wnone,none -Werror --trace=none +268. counterexample.at:488: testing Cex Search Prepend ... +265. counterexample.at:363: ok +./counterexample.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Werror +stderr: input.y: warning: 2 shift/reduce conflicts [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] Example b . A X X Y @@ -7570,35 +7633,7 @@ input.y:4.4: warning: rule useless in parser due to conflicts [-Wother] input.y:8.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:314: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./counterexample.at:451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: -263. counterexample.at:254: ok -264. counterexample.at:298: ok -stdout: ./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Werror -268. counterexample.at:488: testing Cex Search Prepend ... -./conflicts.at:558: $PREPARSER ./input -stderr: -input.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example A b . - First reduce derivation a -> [ A b . ] - Second reduce derivation a -> [ A b -> [ b . ] ] -input.y:4.9: warning: rule useless in parser due to conflicts [-Wother] -stderr: -./counterexample.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:372: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -syntax error, unexpected end of file, expecting 'b' -./conflicts.at:558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -265. 
counterexample.at:363: ok - -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y -Wnone,none -Werror --trace=none - -219. conflicts.at:558: ok -stderr: -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y -Wnone,none -Werror --trace=none - -stderr: input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples] First example: D . A $end @@ -7614,11 +7649,28 @@ `-> 4: B b A `-> 6: D . input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] -sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples ./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +./counterexample.at:409: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +264. counterexample.at:298: ok + +./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=none -Werror --trace=none + +./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=none -Werror --trace=none +stderr: +stdout: 269. counterexample.at:550: testing R/R cex with prec ... ./counterexample.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:558: $PREPARSER ./input +stderr: +stderr: +stderr: +stderr: +syntax error, unexpected end of file, expecting 'b' +input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +sr-rr.y: error: 1 reduce/reduce conflict [-Werror=conflicts-rr] +sr-rr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./conflicts.at:558: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] @@ -7648,20 +7700,6 @@ `-> 5: N a B `-> 7: A . input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -270. counterexample.at:610: testing Null nonterminals ... 
-./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./conflicts.at:2359: sed 's,.*/$,,' stderr 1>&2 -./counterexample.at:409: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y - -./counterexample.at:621: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./counterexample.at:499: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -271. counterexample.at:797: testing Non-unifying Prefix Share ... -./counterexample.at:810: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=error -./conflicts.at:2317: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret 2.y --warnings=none -Werror --trace=none -272. counterexample.at:842: testing Deep Null Unifying ... -./counterexample.at:854: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: input.y: warning: 2 reduce/reduce conflicts [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on tokens A, C [-Wcounterexamples] First example D . A $end @@ -7669,8 +7707,33 @@ Second example B D . A $end Second reduce derivation $accept -> [ s -> [ B b -> [ D . ] A ] $end ] input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -./conflicts.at:1838: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input-keep.y --warnings=none -Werror --trace=none ./counterexample.at:409: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr +270. counterexample.at:610: testing Null nonterminals ... +./counterexample.at:621: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./conflicts.at:748: sed 's,.*/$,,' stderr 1>&2 +./conflicts.at:2359: sed 's,.*/$,,' stderr 1>&2 +266. counterexample.at:399: ok +219. 
conflicts.at:558: ok +./counterexample.at:499: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +stdout: +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=error +./conflicts.at:564: $PREPARSER ./input +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=error +stderr: +syntax error, unexpected end of file, expecting 'b' +./conflicts.at:564: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +254. conflicts.at:1592: ok +257. conflicts.at:2299: ok + +stderr: + +220. conflicts.at:564: ok +stdout: +./conflicts.at:1096: $PREPARSER ./input +stderr: +stderr: stderr: input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] @@ -7702,26 +7765,6 @@ `-> 7: A c A `-> 5: %empty . `-> 7: %empty ./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -stderr: -266. counterexample.at:399: ok -stderr: -stderr: -./counterexample.at:562: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] - Example: H i J . J J - Shift derivation - s - `-> 2: a J - `-> 3: H i J . J - Reduce derivation - s - `-> 1: a - `-> 3: H i J J - `-> 5: i J . -input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] -stdout: -./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token B [-Wcounterexamples] Example N A . B C @@ -7733,8 +7776,28 @@ Reduce derivation s -> [ n -> [ N n -> [ N a -> [ A . ] B ] D ] C ] input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] ./counterexample.at:499: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./conflicts.at:564: $PREPARSER ./input -stderr: +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./counterexample.at:562: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y + + +271. counterexample.at:797: testing Non-unifying Prefix Share ... +272. counterexample.at:842: testing Deep Null Unifying ... +./counterexample.at:810: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +268. counterexample.at:488: ok +./counterexample.at:854: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +234. 
conflicts.at:1096: ok +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Wnone,none -Werror --trace=none + +./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Wnone,none -Werror --trace=none +273. counterexample.at:884: testing Deep Null Non-unifying ... +./counterexample.at:896: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +274. synclines.at:194: testing Prologue syncline ... +./synclines.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + + +275. synclines.at:214: testing %union syncline ... +./synclines.at:214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] @@ -7749,68 +7812,32 @@ `-> 3: b `-> 6: D `-> 4: c `-> 5: %empty . -268. counterexample.at:488: ok ./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -./counterexample.at:810: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -syntax error, unexpected end of file, expecting 'b' -./conflicts.at:564: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./counterexample.at:854: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y -Wnone,none -Werror --trace=none -257. conflicts.at:2299: ok -254. conflicts.at:1592: ok -220. conflicts.at:564: ok - -273. counterexample.at:884: testing Deep Null Non-unifying ... -./counterexample.at:896: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -stderr: stderr: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] - Example H i J . J J - Shift derivation s -> [ a -> [ H i J . J ] J ] - Reduce derivation s -> [ a -> [ H i -> [ i J . ] J J ] ] + Example: H i J . J J + Shift derivation + s + `-> 2: a J + `-> 3: H i J . J + Reduce derivation + s + `-> 1: a + `-> 3: H i J J + `-> 5: i J . input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] - -input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] - Example B . b c - First reduce derivation S -> [ B -> [ A -> [ B . ] b A -> [ ] ] C -> [ A -> [ ] c A -> [ ] ] ] - Second reduce derivation S -> [ B C -> [ A -> [ B -> [ A -> [ . 
] b A -> [ ] ] ] c A -> [ ] ] ] -input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] - Example C . c b - First reduce derivation S -> [ C -> [ A -> [ C . ] c A -> [ ] ] B -> [ A -> [ ] b A -> [ ] ] ] - Second reduce derivation S -> [ C B -> [ A -> [ C -> [ A -> [ . ] c A -> [ ] ] ] b A -> [ ] ] ] -./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr - -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] - Example A a . D - Shift derivation s -> [ A a d -> [ . D ] ] - Reduce derivation s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] ] -269. counterexample.at:550: ok -./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr ./conflicts.at:2359: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-sr sr-rr.y --warnings=none -Werror --trace=none - -274. synclines.at:194: testing Prologue syncline ... -./synclines.at:194: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -272. counterexample.at:842: 271. counterexample.at:797: ok - ok -275. synclines.at:214: testing %union syncline ... -./synclines.at:214: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stdout: -./conflicts.at:1096: $PREPARSER ./input -stderr: - -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./counterexample.at:854: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y 276. synclines.at:237: testing %union name syncline ... ./synclines.at:253: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +277. synclines.at:264: testing Postprologue syncline ... +./counterexample.at:810: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +./synclines.at:264: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=none -Werror --trace=none +stderr: stderr: - input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] First example: A a . D $end @@ -7828,35 +7855,26 @@ `-> 4: c `-> 5: %empty . 
./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -stderr: -stdout: -./counterexample.at:896: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y -234. conflicts.at:1096: ok -277. synclines.at:264: testing Postprologue syncline ... -./conflicts.at:743: $PREPARSER ./input -stderr: - -syntax error, unexpected 'a', expecting 'b' or 'c' -./conflicts.at:743: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input.y: warning: 4 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] + Example B . b c + First reduce derivation S -> [ B -> [ A -> [ B . ] b A -> [ ] ] C -> [ A -> [ ] c A -> [ ] ] ] + Second reduce derivation S -> [ B C -> [ A -> [ B -> [ A -> [ . ] b A -> [ ] ] ] c A -> [ ] ] ] +input.y: warning: reduce/reduce conflict on tokens b, c [-Wcounterexamples] + Example C . c b + First reduce derivation S -> [ C -> [ A -> [ C . ] c A -> [ ] ] B -> [ A -> [ ] b A -> [ ] ] ] + Second reduce derivation S -> [ C B -> [ A -> [ C -> [ A -> [ . ] c A -> [ ] ] ] b A -> [ ] ] ] ./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c syncline.c -./synclines.at:264: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -278. synclines.at:291: testing Action syncline ... -./synclines.at:291: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c syncline.c -279. synclines.at:310: testing Epilogue syncline ... -./synclines.at:310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y +./counterexample.at:562: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./counterexample.at:896: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stderr: +269. counterexample.at:550: ok syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ stderr: -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" - | ^~~~~ -./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:194: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7883,7 +7901,36 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:194: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] + Example A a . D + Shift derivation s -> [ A a d -> [ . D ] ] + Reduce derivation s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] ] +./counterexample.at:854: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c syncline.c +stdout: +./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c syncline.c +syncline.c:4: #error "4" +272. 
counterexample.at:842: ok +./synclines.at:194: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] + Example H i J . J J + Shift derivation s -> [ a -> [ H i J . J ] J ] + Reduce derivation s -> [ a -> [ H i -> [ i J . ] J J ] ] +input.y:5.13-15: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:810: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr + +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y +./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c input.c +stderr: +./conflicts.at:753: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" + | ^~~~~ +./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7910,21 +7957,11 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c syncline.c -280. synclines.at:327: testing %code top syncline ... -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -./synclines.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -stdout: -syncline.c:4: #error "4" stderr: -./synclines.at:214: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -stdout: -stderr: ./synclines.at:254: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -7952,100 +7989,28 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +271. counterexample.at:797: ok + +./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c syncline.c +stderr: +stdout: input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] input.y: warning: shift/reduce conflict on token D [-Wcounterexamples] First example A a . D $end Shift derivation $accept -> [ s -> [ A a d -> [ . D ] ] $end ] Second example A a . D E $end Reduce derivation $accept -> [ s -> [ A a a -> [ b -> [ c -> [ . ] ] ] d -> [ D ] E ] $end ] -syncline.c:4: #error "4" ./counterexample.at:896: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -stdout: -./synclines.at:194: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -281. synclines.at:346: testing %destructor syncline ... -syncline.c:4: #error "4" -./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c syncline.c -./synclines.at:254: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c input.c -./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c syncline.c -./synclines.at:194: $CC $CFLAGS $CPPFLAGS -c input.c -./synclines.at:346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -273. counterexample.at:884: ok -./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c input.c stderr: -./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c syncline.c -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" - | ^~~~~ -./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 - # Remove left-hand margin. - s/^[\d ]{6}\| //gm; - - # 1. 
Remove useless lines. - - # distcc clutter. - s/^distcc\[\d+\] .*\n//gm; - # c vs. c++. - s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; - # Function context. - s/^[^:]*: In function '[^']+':\n//gm; - # Caret error (with possible '~' to underline). - s/^ *#error.*\n *\^~*\n//gm; - # Number of errors. - s/^1 error generated\.\n//gm; - - # 2. Normalize the lines we kept. - - # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). - s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; - # Remove column. - s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; - # Map all combinations of "error: " and "#error: " to "#error ". - s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; -EOF - +syncline.c:4: #error "4" stdout: +./synclines.at:214: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +278. synclines.at:291: testing Action syncline ... syncline.c:4: #error "4" -./synclines.at:310: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -stderr: -stderr: -./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c input.c input.y:2:2: error: #error "2" 2 | #error "2" | ^~~~~ -./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 - # Remove left-hand margin. - s/^[\d ]{6}\| //gm; - - # 1. Remove useless lines. - - # distcc clutter. - s/^distcc\[\d+\] .*\n//gm; - # c vs. c++. - s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; - # Function context. - s/^[^:]*: In function '[^']+':\n//gm; - # Caret error (with possible '~' to underline). - s/^ *#error.*\n *\^~*\n//gm; - # Number of errors. - s/^1 error generated\.\n//gm; - - # 2. Normalize the lines we kept. - - # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). - s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; - # Remove column. - s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; - # Map all combinations of "error: " and "#error: " to "#error ". - s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; -EOF - -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" - | ^~~~~ -./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:194: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8072,15 +8037,15 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +./synclines.at:254: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./synclines.at:291: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./synclines.at:214: $CC $CFLAGS $CPPFLAGS -c input.c stderr: -stdout: -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Werror -stdout: -syncline.c:4: #error "4" syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ +273. counterexample.at:884: ok ./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8108,53 +8073,23 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -stderr: -./synclines.at:291: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +stdout: input.y:2: #error "2" -./synclines.at:214: cat stdout -./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c syncline.c -input.y:2:2: error: #error "2" - 2 | #error "2" - | ^~~~~ -./synclines.at:194: "$PERL" -p -0777 - stderr <<\EOF || exit 77 - # Remove left-hand margin. 
- s/^[\d ]{6}\| //gm; - - # 1. Remove useless lines. - - # distcc clutter. - s/^distcc\[\d+\] .*\n//gm; - # c vs. c++. - s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; - # Function context. - s/^[^:]*: In function '[^']+':\n//gm; - # Caret error (with possible '~' to underline). - s/^ *#error.*\n *\^~*\n//gm; - # Number of errors. - s/^1 error generated\.\n//gm; - - # 2. Normalize the lines we kept. - - # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). - s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; - # Remove column. - s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; - # Map all combinations of "error: " and "#error: " to "#error ". - s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; -EOF - +./synclines.at:194: cat stdout +./synclines.at:254: $CC $CFLAGS $CPPFLAGS -c input.c +279. synclines.at:310: testing Epilogue syncline ... +./synclines.at:310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stdout: -stderr: +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Werror syncline.c:4: #error "4" -./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c input.c ./synclines.at:264: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 stderr: -stdout: -input.y:8:2: error: #error "8" - 8 | #error "8" +input.y:2:2: error: #error "2" + 2 | #error "2" | ^~~~~ -./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + +./synclines.at:214: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8182,8 +8117,15 @@ EOF ./synclines.at:264: $CC $CFLAGS $CPPFLAGS -c input.c -275. synclines.at:214: ok +274. synclines.at:194: ok +stdout: +280. synclines.at:327: testing %code top syncline ... +stderr: +./synclines.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y input.y:2: #error "2" +./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c syncline.c +./synclines.at:214: cat stdout +stderr: input.y:1:7: error: expected '{' before 'break' 1 | %union break | ^~~~~ @@ -8314,6 +8256,7 @@ input.c:1162:11: warning: implicit declaration of function 'yydestruct' [-Wimplicit-function-declaration] 1162 | yydestruct ("Error: discarding", | ^~~~~~~~~~ +281. synclines.at:346: testing %destructor syncline ... ./synclines.at:254: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8341,14 +8284,50 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:194: cat stdout -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Werror +./synclines.at:346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +input.y:13:2: error: #error "13" + 13 | #error "13" + | ^~~~~ +./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + # Remove left-hand margin. + s/^[\d ]{6}\| //gm; + + # 1. Remove useless lines. + + # distcc clutter. + s/^distcc\[\d+\] .*\n//gm; + # c vs. c++. + s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; + # Function context. 
+ s/^[^:]*: In function '[^']+':\n//gm; + # Caret error (with possible '~' to underline). + s/^ *#error.*\n *\^~*\n//gm; + # Number of errors. + s/^1 error generated\.\n//gm; + + # 2. Normalize the lines we kept. + + # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). + s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; + # Remove column. + s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; + # Map all combinations of "error: " and "#error: " to "#error ". + s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; +EOF + + +275. synclines.at:214: ok stderr: -./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c syncline.c stdout: -input.y: In function 'yyparse': -input.y:8:2: error: #error "8" - 8 | #error "8" +stdout: +./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c syncline.c +stderr: +sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] +input.y:13: #error "13" +./synclines.at:264: cat stdout +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" | ^~~~~ ./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. @@ -8377,10 +8356,6 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -282. synclines.at:370: testing %printer syncline ... -input.y:8: #error "8" -./synclines.at:310: cat stdout -stdout: input.y:1: #error expected '{' before 'break' %union break ^~~~~ @@ -8508,22 +8483,27 @@ input.c:1162: #error warning: implicit declaration of function 'yydestruct' [-Wimplicit-function-declaration] yydestruct ("Error: discarding", ^~~~~~~~~~ -./synclines.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: ./synclines.at:255: grep '^input.y:1' stdout stdout: -input.y:8: #error "8" -./synclines.at:291: cat stdout -279. synclines.at:310: ok +277. synclines.at:264: ok +282. synclines.at:370: testing %printer syncline ... +stdout: +input.y:1: #error expected '{' before 'break' +input.y:1: #error expected '{' before 'break' + +syncline.c:4: #error "4" +./conflicts.at:2363: sed 's,.*/$,,' stderr 1>&2 +276. synclines.at:237: ok +./synclines.at:291: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c syncline.c +./synclines.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./synclines.at:291: $CC $CFLAGS $CPPFLAGS -c input.c stderr: -input.y:13:2: error: #error "13" - 13 | #error "13" - | ^~~~~ syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" 4 | #error "4" | ^~~~~ -./synclines.at:264: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8550,8 +8530,17 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=error -./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +283. synclines.at:440: testing syncline escapes: yacc.c ... 
+./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c syncline.c +stderr: +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" + | ^~~~~ +./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 +./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8578,12 +8567,16 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF +stdout: +syncline.c:4: #error "4" +./synclines.at:310: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 + stderr: -syncline.c: In function 'foo': -syncline.c:4:2: error: #error "4" - 4 | #error "4" +input.y: In function 'yyparse': +input.y:8:2: error: #error "8" + 8 | #error "8" | ^~~~~ -./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:291: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8610,53 +8603,56 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -274. synclines.at:194: 278. synclines.at:291: ok - ok -stdout: stdout: -stdout: -stderr: -sr-rr.y: error: 1 shift/reduce conflict [-Werror=conflicts-sr] +./synclines.at:310: $CC $CFLAGS $CPPFLAGS -c input.c syncline.c:4: #error "4" +./synclines.at:346: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +284. synclines.at:440: testing syncline escapes: glr.c ... +stderr: +stdout: +./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 +input.y:8: #error "8" +./synclines.at:291: cat stdout +syncline.c: In function 'foo': +syncline.c:4:2: error: #error "4" + 4 | #error "4" + | ^~~~~ +./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c input.c +./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + # Remove left-hand margin. + s/^[\d ]{6}\| //gm; + # 1. Remove useless lines. -./synclines.at:327: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -input.y:1: #error expected '{' before 'break' -input.y:1: #error expected '{' before 'break' -stdout: -syncline.c:4: #error "4" -./synclines.at:346: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -./conflicts.at:2363: sed 's,.*/$,,' stderr 1>&2 -276. synclines.at:237: ok -input.y:13: #error "13" -./synclines.at:264: cat stdout + # distcc clutter. + s/^distcc\[\d+\] .*\n//gm; + # c vs. c++. + s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; + # Function context. + s/^[^:]*: In function '[^']+':\n//gm; + # Caret error (with possible '~' to underline). + s/^ *#error.*\n *\^~*\n//gm; + # Number of errors. + s/^1 error generated\.\n//gm; + + # 2. Normalize the lines we kept. + + # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). + s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; + # Remove column. + s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; + # Map all combinations of "error: " and "#error: " to "#error ". + s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; +EOF -./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c input.c -277. synclines.at:264: ./synclines.at:346: $CC $CFLAGS $CPPFLAGS -c input.c - ok -283. synclines.at:440: testing syncline escapes: yacc.c ... -./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=error stderr: -284. synclines.at:440: testing syncline escapes: glr.c ... 
-./synclines.at:440: $CC $CFLAGS $CPPFLAGS \"\\\"\".c -o \"\\\"\" || exit 77 ./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c syncline.c -input.y: error: 2 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -286. synclines.at:440: testing syncline escapes: glr.cc ... -./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 -stderr: 285. synclines.at:440: testing syncline escapes: lalr1.cc ... ./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 -stderr: -input.y:2:2: error: #error "2" - 2 | #error "2" - | ^~~~~ -input.y: In function 'yydestruct': -input.y:2:2: error: #error "2" - 2 | #error "2" +input.y:8:2: error: #error "8" + 8 | #error "8" | ^~~~~ -./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +./synclines.at:310: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8683,7 +8679,20 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 +stdout: +278. synclines.at:291: ok +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Wnone,none -Werror --trace=none +stderr: +syncline.c:4: #error "4" +stdout: +./synclines.at:327: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 +input.y:8: #error "8" +input.y: In function 'yydestruct': +input.y:2:2: error: #error "2" + 2 | #error "2" + | ^~~~~ +./synclines.at:310: cat stdout +./synclines.at:346: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8710,9 +8719,7 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF - - -stdout: +./synclines.at:327: $CC $CFLAGS $CPPFLAGS -c input.c stderr: syncline.c: In function 'foo': syncline.c:4:2: error: #error "4" @@ -8745,40 +8752,68 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF + stdout: -input.y:2: #error "2" -./synclines.at:327: cat stdout -./conflicts.at:748: sed 's,.*/$,,' stderr 1>&2 -stdout: +279. synclines.at:310: ok input.y:2: #error "2" ./synclines.at:346: cat stdout +stdout: syncline.c:4: #error "4" -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y -Wnone,none -Werror --trace=none -287. synclines.at:440: testing syncline escapes: glr2.cc ... -./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 ./synclines.at:370: test "`cat stdout`" = 'syncline.c:4: #error "4"' || exit 77 -288. synclines.at:497: testing %no-lines: yacc.c ... -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y -280. synclines.at:327: ok -./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c input.c -281. 
synclines.at:346: ok -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=error -./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=none -Werror --trace=none stderr: stderr: +281. synclines.at:346: ok +stderr: stdout: +input.y:2:2: error: #error "2" + 2 | #error "2" + | ^~~~~ +./synclines.at:327: "$PERL" -p -0777 - stderr <<\EOF || exit 77 + # Remove left-hand margin. + s/^[\d ]{6}\| //gm; + + # 1. Remove useless lines. + + # distcc clutter. + s/^distcc\[\d+\] .*\n//gm; + # c vs. c++. + s/^clang: warning: treating 'c' input as 'c\+\+'.*\n//gm; + # Function context. + s/^[^:]*: In function '[^']+':\n//gm; + # Caret error (with possible '~' to underline). + s/^ *#error.*\n *\^~*\n//gm; + # Number of errors. + s/^1 error generated\.\n//gm; + + # 2. Normalize the lines we kept. + + # xlc messages. Remove also error identifier (e.g., "1540-0218 (S)"). + s/^"(.*?)", line ([\w.]*): \d+-\d+ \(.\) /$1:$2: /gm; + # Remove column. + s/^([^:]+:\d+)[.:][^:]+:(.+)$/$1:$2/gm; + # Map all combinations of "error: " and "#error: " to "#error ". + s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; +EOF + +./conflicts.at:2363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wno-conflicts-rr sr-rr.y --warnings=none -Werror --trace=none stdout: +./synclines.at:370: $CC $CFLAGS $CPPFLAGS -c input.c +stdout: +input.y:2: #error "2" +./synclines.at:327: cat stdout +286. synclines.at:440: testing syncline escapes: glr.cc ... -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y -./synclines.at:497: mv input.c without.c ./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y +./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y +280. synclines.at:327: ok stderr: input.y: In function 'yy_symbol_value_print': input.y:2:2: error: #error "2" 2 | #error "2" | ^~~~~ -stderr: +287. synclines.at:440: testing syncline escapes: glr2.cc ... ./synclines.at:370: "$PERL" -p -0777 - stderr <<\EOF || exit 77 # Remove left-hand margin. s/^[\d ]{6}\| //gm; @@ -8806,359 +8841,427 @@ s/^([^:]+:\d+):( |#error|error|:)+/$1: #error /gm; EOF -./synclines.at:497: mv input.h without.h -./synclines.at:497: grep '#line' *.c *.h -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y -stdout: -289. synclines.at:497: testing %no-lines: glr.c ... +./synclines.at:440: $CXX $CXXFLAGS $CPPFLAGS \"\\\"\".cc -o \"\\\"\" || exit 77 +288. synclines.at:497: testing %no-lines: yacc.c ... 
./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y stdout: input.y:2: #error "2" ./synclines.at:370: cat stdout -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".c \"\\\"\".y -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y -Wnone,none -Werror --trace=none +282. synclines.at:370: ok + +stderr: +stdout: +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y +./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS + +./synclines.at:497: mv input.c without.c +289. synclines.at:497: testing %no-lines: glr.c ... +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.c -d input.y +./synclines.at:497: mv input.h without.h +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./synclines.at:497: grep '#line' *.c *.h +./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file 290. synclines.at:497: testing %no-lines: lalr1.cc ... ./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y stderr: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stdout: -282. 
synclines.at:370: ok -./synclines.at:497: mv input.c with.c stderr: -./synclines.at:497: mv input.h with.h -./synclines.at:497: grep -v '#line' with.c >expout -./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y ./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS stdout: -./synclines.at:497: cat without.c - -./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y -./synclines.at:497: mv input.cc without.cc -./synclines.at:497: grep -v '#line' with.h >expout -./synclines.at:497: cat without.h ./synclines.at:497: mv input.c without.c -./synclines.at:497: mv input.hh without.hh ./synclines.at:497: mv input.h without.h -./synclines.at:497: grep '#line' *.cc *.hh -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y +stderr: +./synclines.at:440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o \"\\\"\".cc \"\\\"\".y +./synclines.at:497: mv input.c with.c ./synclines.at:497: grep '#line' *.c *.h -291. synclines.at:497: testing %no-lines: glr.cc ... -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y -./synclines.at:440: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".c $LIBS -288. synclines.at:497: ok +stdout: +./conflicts.at:754: $PREPARSER ./input +./synclines.at:497: mv input.h with.h ./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -d input.y +stderr: +./synclines.at:497: grep -v '#line' with.c >expout +syntax error, unexpected 'a', expecting 'b' or 'c' +./synclines.at:497: mv input.cc without.cc +./conflicts.at:754: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./synclines.at:497: mv input.hh without.hh +./synclines.at:497: grep '#line' *.cc *.hh +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./synclines.at:497: cat without.c +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y +./synclines.at:497: grep -v '#line' with.h >expout +228. 
conflicts.at:676: ok ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./synclines.at:440: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS +./synclines.at:497: cat without.h +288. synclines.at:497: ok +./synclines.at:497: mv input.c with.c ./synclines.at:497: mv input.cc with.cc -./conflicts.at:748: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Dlr.type=ielr -Dparse.lac=full -o input.c input.y --warnings=none -Werror --trace=none +./synclines.at:440: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS + ./synclines.at:497: mv input.hh with.hh +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./synclines.at:497: mv input.h with.h ./synclines.at:497: grep -v '#line' with.cc >expout - ./synclines.at:440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o \"\\\"\" \"\\\"\".cc $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./synclines.at:497: grep -v '#line' with.c >expout ./synclines.at:497: cat without.cc -./synclines.at:497: mv input.cc without.cc -./synclines.at:497: mv input.c with.c -./synclines.at:497: mv input.hh without.hh -./synclines.at:497: grep '#line' *.cc *.hh -292. synclines.at:497: testing %no-lines: glr2.cc ... -./synclines.at:497: mv input.h with.h ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y +./synclines.at:497: cat without.c + ./synclines.at:497: grep -v '#line' with.hh >expout -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y -./synclines.at:497: grep -v '#line' with.c >expout +291. synclines.at:497: testing %no-lines: glr.cc ... ./synclines.at:497: cat without.hh -./synclines.at:497: cat without.c -stderr: -290. synclines.at:497: ok -stdout: -./conflicts.at:753: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y ./synclines.at:497: grep -v '#line' with.h >expout -283. synclines.at:440: ok -./synclines.at:497: cat without.h +290. 
synclines.at:497: ok ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./synclines.at:497: mv input.cc without.cc -stderr: - -./synclines.at:497: mv input.hh without.hh -stdout: +./synclines.at:497: cat without.h +292. synclines.at:497: testing %no-lines: glr2.cc ... 289. synclines.at:497: ok -./synclines.at:497: grep '#line' *.cc *.hh -./conflicts.at:1096: $PREPARSER ./input ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --no-lines -o input.cc -d input.y stderr: -./synclines.at:497: mv input.cc with.cc -./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./synclines.at:497: mv input.hh with.hh +stdout: +283. synclines.at:440: ok + +./synclines.at:497: mv input.cc without.cc +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./synclines.at:497: mv input.hh without.hh +./synclines.at:497: grep '#line' *.cc *.hh 293. synclines.at:507: testing Output columns ... ./synclines.at:540: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./synclines.at:497: grep -v '#line' with.cc >expout +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -235. conflicts.at:1096: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./synclines.at:497: cat without.cc 294. headers.at:56: testing Invalid CPP guards: --defines=input/input.h ... 
+./synclines.at:497: mv input.cc without.cc +./synclines.at:497: mv input.hh without.hh +./synclines.at:497: grep '#line' *.cc *.hh +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./headers.at:56: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y -./synclines.at:497: grep -v '#line' with.hh >expout -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stderr: +./synclines.at:497: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc -d input.y 295. headers.at:57: testing Invalid CPP guards: --defines=9foo.h ... -./synclines.at:497: cat without.hh +stdout: +./conflicts.at:1096: $PREPARSER ./input ./synclines.at:497: mv input.cc with.cc - +stderr: ./synclines.at:497: mv input.hh with.hh +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:57: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y ./synclines.at:497: grep -v '#line' with.cc >expout -291. synclines.at:497: ok ./synclines.at:541: sed -ne '/--BEGIN/,/--END/{' \ -e '/input.c/s/ [0-9]* / LINE /;' \ -e 'p;}' \ input.c -./headers.at:57: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -296. headers.at:58: testing Invalid CPP guards: %glr-parser --defines=input/input.h ... ./synclines.at:497: cat without.cc -293. synclines.at:507: ok +236. conflicts.at:1096: ok ./synclines.at:497: grep -v '#line' with.hh >expout -./headers.at:58: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y +293. synclines.at:507: ok +./synclines.at:497: mv input.cc with.cc +./synclines.at:497: cat without.hh ./headers.at:56: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./synclines.at:497: mv input.hh with.hh +291. 
synclines.at:497: ok +./synclines.at:497: grep -v '#line' with.cc >expout +./synclines.at:497: cat without.cc ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./synclines.at:497: cat without.hh -292. synclines.at:497: ok +./synclines.at:497: grep -v '#line' with.hh >expout ./headers.at:57: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c +./synclines.at:497: cat without.hh + ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +292. synclines.at:497: ok 297. headers.at:59: testing Invalid CPP guards: %glr-parser --defines=9foo.h ... -298. headers.at:67: testing export YYLTYPE ... -./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header -o input.c input.y +296. headers.at:58: testing Invalid CPP guards: %glr-parser --defines=input/input.h ... ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./headers.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=9foo.h --output=9foo.c 9foo.y -./headers.at:58: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c - -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +298. headers.at:67: testing export YYLTYPE ... +./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --header -o input.c input.y stderr: -299. headers.at:177: testing Sane headers: ... -./headers.at:177: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y stdout: -./conflicts.at:1096: $PREPARSER ./input -stderr: -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./headers.at:58: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --defines=input/input.h --output=input/input.c input/input.y +294. headers.at:56: ok +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file ./headers.at:59: $CC $CFLAGS $CPPFLAGS -c -o 9foo.o -I. -c 9foo.c -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +299. headers.at:177: testing Sane headers: ... 
+./headers.at:177: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y + ./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Werror stderr: -stderr: -stdout: stdout: +./headers.at:58: $CC $CFLAGS $CPPFLAGS -c -o input/input.o -I. -c input/input.c 295. headers.at:57: ok -294. headers.at:56: ok -236. conflicts.at:1096: ok -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +300. headers.at:178: testing Sane headers: %locations %debug ... +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file ./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c - -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -stderr: -stdout: +./headers.at:178: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y stderr: -./conflicts.at:754: $PREPARSER ./input input.y:11.1-18: error: deprecated directive: '%name-prefix "my_"', use '%define api.prefix {my_}' [-Werror=deprecated] input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -stderr: -syntax error, unexpected 'a', expecting 'b' or 'c' - -./conflicts.at:754: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -300. headers.at:178: testing Sane headers: %locations %debug ... ./headers.at:85: sed 's,.*/$,,' stderr 1>&2 -./headers.at:178: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -228. conflicts.at:676: ok +./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c 301. headers.at:180: testing Sane headers: %glr-parser ... ./headers.at:180: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -302. headers.at:181: testing Sane headers: %locations %debug %glr-parser ... 
+./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=error -./headers.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c - +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file ./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y -Wnone,none -Werror --trace=none -303. headers.at:183: testing Sane headers: api.pure ... 
-./headers.at:183: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./headers.at:85: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret --header -o input.c input.y --warnings=none -Werror --trace=none +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c stderr: -stdout: stderr: -284. synclines.at:440: ok stdout: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stdout: +./conflicts.at:1096: $PREPARSER ./input +stderr: +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./headers.at:177: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./headers.at:102: $CC $CFLAGS $CPPFLAGS -c -o caller.o caller.c - +235. conflicts.at:1096: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stdout: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -304. 
headers.at:184: testing Sane headers: api.push-pull=both ... -./headers.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y 299. headers.at:177: ok +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file + ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +stderr: +stdout: +./headers.at:103: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +302. headers.at:181: testing Sane headers: %locations %debug %glr-parser ... +./headers.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +303. headers.at:183: testing Sane headers: api.pure ... +./headers.at:183: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y stderr: -305. headers.at:185: testing Sane headers: api.pure api.push-pull=both ... -./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stdout: +284. synclines.at:440: ok +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y -./headers.at:103: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c + ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c stderr: -./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -stdout: -stderr: -296. headers.at:58: ok +304. headers.at:184: testing Sane headers: api.push-pull=both ... 
stdout: +./headers.at:184: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +297. headers.at:59: ok ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: +stderr: +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stdout: +stdout: +296. headers.at:58: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:104: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o caller caller.o input.o $LIBS +./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file + +305. headers.at:185: testing Sane headers: api.pure api.push-pull=both ... +./headers.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.c input.y stderr: -stderr: -stdout: -./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c stdout: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -303. headers.at:183: ok +stderr: 306. headers.at:187: testing Sane headers: c++ ... +stdout: ./headers.at:187: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y -297. headers.at:59: ok +./headers.at:105: $PREPARSER ./caller stderr: -stdout: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:105: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./headers.at:178: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +298. headers.at:67: ok +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file stderr: +./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c stdout: -./headers.at:104: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o caller caller.o input.o $LIBS -300. headers.at:178: ok - +300. headers.at:178: ok ./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -stderr: -308. 
headers.at:189: testing Sane headers: c++ api.value.type=variant parse.assert ... -./headers.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file 307. headers.at:188: testing Sane headers: %locations %debug c++ ... ./headers.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y -stdout: +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +308. headers.at:189: testing Sane headers: c++ api.value.type=variant parse.assert ... +./headers.at:189: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y stderr: stdout: -./headers.at:105: $PREPARSER ./caller -./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./headers.at:183: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file stderr: +stdout: +303. headers.at:183: ok +./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc + +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file 309. headers.at:191: testing Sane headers: %locations c++ %glr-parser ... -./headers.at:105: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./headers.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o input.cc input.y ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -298. 
headers.at:67: ok -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +stderr: +stdout: +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stdout: -./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +305. headers.at:185: ok -304. headers.at:184: ok ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: H i . J K $end + Shift derivation + $accept + `-> 0: a $end + `-> 2: H i + `-> 4: i . J K + Second example: H i . J $end + Reduce derivation + $accept + `-> 0: s $end + `-> 1: a J + `-> 2: H i . +input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./counterexample.at:451: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stderr: +stdout: 310. headers.at:199: testing Several parsers ... - -./headers.at:320: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x1.c x1.y ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o input.o input.cc +./headers.at:320: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x1.c x1.y +./headers.at:184: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -311. actions.at:24: testing Midrule actions ... 
-./actions.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y -stderr: ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: stdout: +304. headers.at:184: ok ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:185: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c ./headers.at:320: $CC $CFLAGS $CPPFLAGS -c -o x1.o x1.c -stderr: + +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +311. actions.at:24: testing Midrule actions ... +./actions.at:59: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y ./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file ./actions.at:60: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +stderr: stdout: -285. synclines.at:440: stderr: - ok -stdout: -305. headers.at:185: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +285. 
synclines.at:440: ok +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: stdout: -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file - +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file ./headers.at:180: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c 312. actions.at:72: testing Typed midrule actions ... ./actions.at:109: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y -313. actions.at:122: testing Implicitly empty rule ... -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y +./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file stderr: stdout: -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file 301. headers.at:180: ok -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Werror +stderr: +stdout: +./headers.at:320: echo "x1" >>expout +./headers.at:321: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x2.c x2.y -./actions.at:110: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file -314. actions.at:172: testing Invalid uses of %empty ... -./actions.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret one.y +./actions.at:110: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +313. actions.at:122: testing Implicitly empty rule ... 
+./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./headers.at:321: $CC $CFLAGS $CPPFLAGS -c -o x2.o x2.c +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Werror stdout: -./actions.at:192: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -u one.y 286. synclines.at:440: ok +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file + +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: 1.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] 11 | a: /* empty. */ {}; | ^~ | %empty 1.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./actions.at:202: sed -e '1,8d' one.y +314. actions.at:172: testing Invalid uses of %empty ... +./actions.at:182: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret one.y ./actions.at:133: sed 's,.*/$,,' stderr 1>&2 -./actions.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret two.y - ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -314. actions.at:172: ok -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=error +./actions.at:192: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -u one.y ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -315. 
actions.at:240: testing Valid uses of %empty ... -./actions.at:259: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=error +stderr: +stdout: +./actions.at:202: sed -e '1,8d' one.y +./actions.at:61: $PREPARSER ./input +stderr: +./actions.at:61: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret two.y ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stdout: -./headers.at:320: echo "x1" >>expout -./actions.at:259: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +311. actions.at:24: ok +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +314. actions.at:172: ok +./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file + +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Wnone,none -Werror --trace=none + +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: +stdout: +302. headers.at:181: ok +315. actions.at:240: testing Valid uses of %empty ... +./actions.at:259: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 316. actions.at:270: testing Add missing %empty ... 
./actions.at:285: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --update -Wall input.y -./headers.at:321: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x2.c x2.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stderr: +./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=none -Werror --trace=none + input.y:3.4-5: warning: empty rule without %empty [-Wempty-rule] input.y:4.3-5.1: warning: empty rule without %empty [-Wempty-rule] input.y:6.3: warning: empty rule without %empty [-Wempty-rule] @@ -9166,60 +9269,27 @@ input.y:9.3: warning: empty rule without %empty [-Wempty-rule] bison: file 'input.y' was updated (backup: 'input.y~') ./actions.at:286: cat input.y -stdout: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:300: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y -Wnone,none -Werror --trace=none ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./headers.at:181: $CC $CFLAGS $CPPFLAGS -c -o $h.o $h.c -stderr: +./actions.at:300: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall input.y +./actions.at:259: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +317. actions.at:365: testing Initial location: yacc.c ... +./actions.at:365: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file 316. actions.at:270: ok -stdout: -./actions.at:133: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -Wempty-rule 1.y --warnings=none -Werror --trace=none -302. 
headers.at:181: ok +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret 2.y +./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:365: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -stderr: -./headers.at:321: $CC $CFLAGS $CPPFLAGS -c -o x2.o x2.c -stdout: -./actions.at:61: $PREPARSER ./input - -stderr: ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:61: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret 2.y -311. actions.at:24: ok ./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Werror 318. actions.at:366: testing Initial location: yacc.c api.pure=full ... ./actions.at:366: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -317. actions.at:365: testing Initial location: yacc.c ... -./actions.at:365: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file - -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./actions.at:366: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Werror -319. actions.at:367: testing Initial location: yacc.c api.pure %parse-param { int x } ... 
-./actions.at:367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -./actions.at:365: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stdout: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./actions.at:111: $PREPARSER ./input -stderr: -stderr: -stdout: -./actions.at:111: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:260: $PREPARSER ./input -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -stderr: -312. actions.at:72: ok -./actions.at:260: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +258. conflicts.at:2331: ok stderr: -315. actions.at:240: ok -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file + 2.y:11.17-18: error: empty rule without %empty [-Werror=empty-rule] 11 | a: /* empty. */ {}; | ^~ @@ -9229,282 +9299,251 @@ | ^~ | %empty 2.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] - ./actions.at:149: sed 's,.*/$,,' stderr 1>&2 - +./actions.at:366: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +319. actions.at:367: testing Initial location: yacc.c api.pure %parse-param { int x } ... +./actions.at:367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=error -320. actions.at:368: testing Initial location: yacc.c api.push-pull=both ... -./actions.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -321. actions.at:369: testing Initial location: yacc.c api.push-pull=both api.pure=full ... 
-./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file -./actions.at:369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: -./conflicts.at:1096: $PREPARSER ./input +./headers.at:321: echo "x2" >>expout +./headers.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x3.c x3.y stderr: -syntax error -./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./actions.at:111: $PREPARSER ./input +stderr: +./actions.at:367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:111: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y -Wnone,none -Werror --trace=none -237. conflicts.at:1096: ok -./actions.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +312. actions.at:72: ok ./actions.at:149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret 2.y --warnings=none -Werror --trace=none -322. actions.at:370: testing Initial location: glr.c ... -./actions.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./headers.at:322: $CC $CFLAGS $CPPFLAGS -c -o x3.o x3.c +320. actions.at:368: testing Initial location: yacc.c api.push-pull=both ... +./actions.at:368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -Wno-empty-rule 2.y -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./actions.at:366: $PREPARSER ./input +./actions.at:365: $PREPARSER ./input stderr: +313. 
actions.at:122: ok 1.1 1.1: syntax error -./actions.at:366: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -318. actions.at:366: ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./actions.at:365: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -313. actions.at:122: ok +317. actions.at:365: ok stdout: +./actions.at:260: $PREPARSER ./input +stderr: -./conflicts.at:2417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wnone $file -./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./actions.at:260: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +315. actions.at:240: ok +stderr: +321. actions.at:369: testing Initial location: yacc.c api.push-pull=both api.pure=full ... +./actions.at:369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stdout: +322. actions.at:370: testing Initial location: glr.c ... +./actions.at:370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc 323. actions.at:371: testing Initial location: glr.c api.pure ... -./actions.at:371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -324. actions.at:372: testing Initial location: lalr1.cc ... -./actions.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: -./conflicts.at:2418: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Werror $file +stderr: stdout: -./actions.at:365: $PREPARSER ./input +./actions.at:371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stdout: +./actions.at:366: $PREPARSER ./input +./conflicts.at:1096: $PREPARSER ./input +./actions.at:370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -./actions.at:371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +syntax error +./conflicts.at:1096: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./actions.at:369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS 1.1 1.1: syntax error -./actions.at:365: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:366: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +237. conflicts.at:1096: ok +318. actions.at:366: ok stderr: stdout: + ./actions.at:367: $PREPARSER ./input -317. 
actions.at:365: ok -stderr: + stderr: 1.1 1.1: syntax error ./actions.at:367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:372: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -./headers.at:321: echo "x2" >>expout +./actions.at:371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +324. actions.at:372: testing Initial location: lalr1.cc ... +./actions.at:372: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y 319. actions.at:367: ok -./headers.at:322: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x3.c x3.y +325. actions.at:373: testing Initial location: glr.cc ... +./actions.at:373: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:372: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:373: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +326. actions.at:374: testing Initial location: glr2.cc ... +./actions.at:374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./actions.at:374: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: -./actions.at:369: $PREPARSER ./input stdout: +./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: -325. actions.at:373: testing Initial location: glr.cc ... -1.1 -1.1: syntax error +stdout: ./actions.at:368: $PREPARSER ./input stderr: -./actions.at:369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file 1.1 1.1: syntax error ./actions.at:368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:373: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y - 320. actions.at:368: ok -321. actions.at:369: ./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file - ok -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -326. actions.at:374: testing Initial location: glr2.cc ... 
-./actions.at:373: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS - -./headers.at:322: $CC $CFLAGS $CPPFLAGS -c -o x3.o x3.c -./actions.at:374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -327. actions.at:383: testing Initial location: yacc.c api.pure=full ... -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -328. actions.at:394: testing Initial location: yacc.c api.pure=full ... -./actions.at:383: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:374: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file +./actions.at:369: $PREPARSER ./input +stderr: +327. actions.at:383: testing Initial location: yacc.c api.pure=full ... +1.1 +1.1: syntax error +./actions.at:369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:383: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +321. actions.at:369: ok + ./actions.at:383: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +328. actions.at:394: testing Initial location: yacc.c api.pure=full ... 
+./actions.at:394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc ./actions.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file stderr: -./conflicts.at:2444: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wnone $file stdout: +./headers.at:322: echo "x3" >>expout stderr: -stderr: -stdout: -./conflicts.at:2445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Werror $file +./headers.at:323: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x4.c x4.y stdout: -./actions.at:383: $PREPARSER ./input -./actions.at:394: $PREPARSER ./input +./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc - -: syntax error +stdout: +306. headers.at:187: ok stderr: -./actions.at:383: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -0 -0: syntax error -./actions.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -258. conflicts.at:2331: ok -327. actions.at:383: ok -328. 
actions.at:394: ok - - +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] +time limit exceeded: 6.000000 + First example H i . J K $end + Shift derivation $accept -> [ a -> [ H i -> [ i . J K ] ] $end ] + Second example H i . J $end + Reduce derivation $accept -> [ s -> [ a -> [ H i . ] J ] $end ] +input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +./headers.at:323: $CC $CFLAGS $CPPFLAGS -c -o x4.o x4.c +267. counterexample.at:441: ok 329. actions.at:478: testing Location print: yacc.c ... ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -330. actions.at:478: testing Location print: glr.c ... + stderr: +stdout: +./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +330. actions.at:478: testing Location print: glr.c ... ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +stderr: stdout: -./actions.at:370: $PREPARSER ./input +308. headers.at:189: ok + +./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: 331. actions.at:478: testing Location print: lalr1.cc ... ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stdout: +./actions.at:370: $PREPARSER ./input +stderr: stderr: +stdout: 1.1 1.1: syntax error ./actions.at:370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:383: $PREPARSER ./input +stderr: + +: syntax error +./actions.at:383: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 322. actions.at:370: ok +./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +327. actions.at:383: ok +./actions.at:394: $PREPARSER ./input stderr: -./actions.at:478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: + +0 +0: syntax error +./actions.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stdout: -./headers.at:322: echo "x3" >>expout -./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./actions.at:371: $PREPARSER ./input -./headers.at:323: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x4.c x4.y +328. actions.at:394: ok stderr: +332. actions.at:478: testing Location print: glr.cc ... 1.1 1.1: syntax error ./actions.at:371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: -287. synclines.at:440: stdout: - ok -stderr: -323. actions.at:371: ok -332. actions.at:478: testing Location print: glr.cc ... 
./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stdout: -./headers.at:187: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc - -./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -./headers.at:323: $CC $CFLAGS $CPPFLAGS -c -o x4.o x4.c -stdout: +323. actions.at:371: ok 333. actions.at:478: testing Location print: glr2.cc ... -306. headers.at:187: ok ./actions.at:478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y 334. actions.at:488: testing Exotic Dollars ... -./actions.at:532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y -stderr: -stdout: +./actions.at:532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -v -o input.c input.y +./actions.at:478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./actions.at:478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 335. actions.at:1047: testing Printers and Destructors ... ./actions.at:1047: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./headers.at:189: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc ./actions.at:533: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -308. headers.at:189: ok -./actions.at:1047: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: - -stdout: ./actions.at:478: $PREPARSER ./input stderr: ./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +329. actions.at:478: ok + +./actions.at:1047: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS 336. actions.at:1048: testing Printers and Destructors with union ... ./actions.at:1048: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1048: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: H i . J K $end - Shift derivation - $accept - `-> 0: a $end - `-> 2: H i - `-> 4: i . J K - Second example: H i . J $end - Reduce derivation - $accept - `-> 0: s $end - `-> 1: a J - `-> 2: H i . -input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -329. actions.at:478: ok -./counterexample.at:451: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +stdout: +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +stderr: +stdout: +287. synclines.at:440: ok 337. actions.at:1050: testing Printers and Destructors: %glr-parser ... 
./actions.at:1050: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1048: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./actions.at:1050: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -stderr: -stdout: ./actions.at:534: $PREPARSER ./input stderr: ./actions.at:534: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:562: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -stdout: -./actions.at:372: $PREPARSER ./input -stderr: -1.1 -1.1: syntax error -./actions.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:562: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -324. actions.at:372: ok - stderr: stdout: -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc -338. actions.at:1051: testing Printers and Destructors with union: %glr-parser ... -./actions.at:1051: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc ./actions.at:1047: $PREPARSER ./input '(x)' stderr: sending: '(' (0@0-9) @@ -9541,13 +9580,8 @@ Freeing nterm input (5@0-29) Successful parse. ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1051: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./actions.at:1047: $PREPARSER ./input '(y)' stderr: -stderr: -stderr: -stdout: -stdout: sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -9561,13 +9595,7 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:563: $PREPARSER ./input -./actions.at:478: $PREPARSER ./input -stderr: -stderr: -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1047: $PREPARSER ./input '(xxxxx)(x)(x)y' -./actions.at:563: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -9605,26 +9633,8 @@ Freeing token 'y' (13@130-139) Parsing FAILED. ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -330. actions.at:478: ok -334. actions.at:488: ok -stderr: -stdout: ./actions.at:1047: $PREPARSER ./input '(x)(x)x' -./actions.at:1048: $PREPARSER ./input '(x)' stderr: -stderr: - -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -9645,11 +9655,10 @@ Freeing token END (7@70-79) Parsing FAILED. ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./actions.at:1047: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' -./actions.at:1048: $PREPARSER ./input '!' 
stderr: +./actions.at:1047: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' +stdout: +./actions.at:478: $PREPARSER ./input stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -9695,6 +9704,37 @@ Freeing nterm line (0@0-29) Parsing FAILED (status 2). ./actions.at:1047: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +335. actions.at:1047: ok +stdout: +./actions.at:563: $PREPARSER ./input +330. actions.at:478: ok +stderr: +stderr: +stdout: + +./actions.at:563: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1048: $PREPARSER ./input '(x)' +334. actions.at:488: ok +stderr: + +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (0@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. +./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1048: $PREPARSER ./input '!' +338. actions.at:1051: testing Printers and Destructors with union: %glr-parser ... +stderr: sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -9702,15 +9742,20 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. +./actions.at:1051: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: 339. actions.at:1053: testing Printers and Destructors: %header lalr1.cc ... +stdout: ./actions.at:1053: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -340. actions.at:1054: testing Printers and Destructors with union: %header lalr1.cc ... +./actions.at:372: $PREPARSER ./input ./actions.at:1048: $PREPARSER ./input '!!!' -335. actions.at:1047: ok -./actions.at:1054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: +1.1 +1.1: syntax error stderr: +./actions.at:372: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -9721,11 +9766,13 @@ Freeing nterm input (5@0-29) Successful parse. ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: +340. actions.at:1054: testing Printers and Destructors with union: %header lalr1.cc ... +./actions.at:1054: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +324. actions.at:372: ok ./actions.at:1048: $PREPARSER ./input '(y)' - +stdout: stderr: -./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -9739,9 +9786,15 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:373: $PREPARSER ./input +stderr: + +1.1 +1.1: syntax error +./actions.at:373: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1048: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: -341. 
actions.at:1056: testing Printers and Destructors: %header glr.cc ... +325. actions.at:373: ok sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -9777,12 +9830,13 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -./actions.at:1056: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1054: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1051: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./actions.at:1048: $PREPARSER ./input '(x)(x)x' -./actions.at:1053: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +341. actions.at:1056: testing Printers and Destructors: %header glr.cc ... +./actions.at:1056: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: + sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -9803,6 +9857,7 @@ Freeing token END (7@70-79) Parsing FAILED. ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1053: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./actions.at:1048: $PREPARSER ./input '(x)(x)(x)(x)(x)(x)(x)' stderr: sending: '(' (0@0-9) @@ -9849,91 +9904,51 @@ Freeing nterm line (0@0-29) Parsing FAILED (status 2). ./actions.at:1048: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -336. actions.at:1048: ok -stderr: - -stdout: -./headers.at:323: echo "x4" >>expout -./headers.at:324: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x5.cc x5.y -./actions.at:1056: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 342. actions.at:1057: testing Printers and Destructors with union: %header glr.cc ... ./actions.at:1057: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./headers.at:324: $CXX $CPPFLAGS $CXXFLAGS -c -o x5.o x5.cc +336. actions.at:1048: ok +./actions.at:1054: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./actions.at:373: $PREPARSER ./input -stderr: -1.1 -1.1: syntax error -./actions.at:373: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -325. actions.at:373: ok -./actions.at:1057: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: +./headers.at:191: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./actions.at:1056: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 343. actions.at:1059: testing Printers and Destructors: %header glr2.cc ... ./actions.at:1059: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc +./actions.at:1057: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1059: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -stderr: -./actions.at:478: $PREPARSER ./input +./headers.at:323: echo "x4" >>expout +./headers.at:324: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x5.cc x5.y stderr: stdout: -stdout: +./headers.at:188: $CXX $CPPFLAGS $CXXFLAGS -c -o $h.o $h.cc stderr: -./actions.at:478: $PREPARSER ./input +stdout: 307. 
headers.at:188: ok -stderr: -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -331. actions.at:478: ok - -332. actions.at:478: ok +./headers.at:324: $CXX $CPPFLAGS $CXXFLAGS -c -o x5.o x5.cc 344. actions.at:1060: testing Printers and Destructors with union: %header glr2.cc ... - ./actions.at:1060: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./actions.at:1059: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./actions.at:1060: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -346. actions.at:1174: testing Default tagged and per-type %printer and %destructor ... -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -345. actions.at:1071: testing Default tagless %printer and %destructor ... -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y 309. headers.at:191: ok -./actions.at:1060: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -347. actions.at:1307: testing Default %printer and %destructor for user-defined end token ... -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input0.c input0.y -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +345. actions.at:1071: testing Default tagless %printer and %destructor ... 
+./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Werror -stderr: stderr: input.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] input.y:30.3-5: error: useless %printer for type <*> [-Werror=other] -input.y:22.3-4: error: useless %destructor for type <> [-Werror=other] -input.y:22.3-4: error: useless %printer for type <> [-Werror=other] -./actions.at:1233: sed 's,.*/$,,' stderr 1>&2 ./actions.at:1116: sed 's,.*/$,,' stderr 1>&2 -stderr: -input0.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] -input0.y:30.3-5: error: useless %printer for type <*> [-Werror=other] ./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -./actions.at:1416: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=error ./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Wnone,none -Werror --trace=none -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=none -Werror --trace=none -./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary 
--show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./actions.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input0 input0.c $LIBS stderr: stdout: ./actions.at:1050: $PREPARSER ./input '(x)' +./actions.at:1116: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -9947,8 +9962,6 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1120: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./actions.at:1050: $PREPARSER ./input '!' stderr: sending: '!' (0@0-9) @@ -9972,6 +9985,7 @@ Successful parse. ./actions.at:1050: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1050: $PREPARSER ./input '(y)' +./actions.at:1120: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -10049,10 +10063,163 @@ 337. actions.at:1050: ok stderr: +stderr: stdout: -./actions.at:1051: $PREPARSER ./input '(x)' +stdout: +./actions.at:478: $PREPARSER ./input +./actions.at:478: $PREPARSER ./input +stderr: +stderr: +346. actions.at:1174: testing Default tagged and per-type %printer and %destructor ... +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +331. actions.at:478: ok +332. actions.at:478: ok + + +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example: . c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: . c A A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 2: c d + `-> 4: %empty . `-> 6: c A A +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: b . c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: b . A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 6: c A + `-> 4: %empty . +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: c . c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 2: c d + `-> 5: a + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: c . A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 2: c d + `-> 6: c A + `-> 4: %empty . +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example: b c . A + Shift derivation + a + `-> 1: b d + `-> 6: c . A + Second example: b c . c A A $end + Reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 5: a + `-> 1: b d + `-> 3: %empty . 
`-> 6: c A A +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example: b c . c A A $end + First reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 5: a + `-> 1: b d + `-> 3: %empty . `-> 6: c A A + Second example: b c . A $end + Second reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 6: c A + `-> 4: %empty . +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + First example: b c . A + Shift derivation + a + `-> 1: b d + `-> 6: c . A + Second example: b c . A $end + Reduce derivation + $accept + `-> 0: a $end + `-> 1: b d + `-> 5: a + `-> 2: c d + `-> 6: c A + `-> 4: %empty . +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example: b d . + First reduce derivation + a + `-> 1: b d . + Second reduce derivation + a + `-> 1: b d + `-> 7: d . +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example: c d . + First reduce derivation + a + `-> 2: c d . + Second reduce derivation + a + `-> 2: c d + `-> 7: d . +input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] +input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr 348. actions.at:1429: testing Default %printer and %destructor are not for error or $undefined ... ./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./counterexample.at:621: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y +347. actions.at:1307: testing Default %printer and %destructor for user-defined end token ... 
+./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input0.c input0.y +stderr: +input.y:22.3-4: error: useless %destructor for type <> [-Werror=other] +input.y:22.3-4: error: useless %printer for type <> [-Werror=other] +./actions.at:1233: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Werror +stderr: +stdout: +stderr: +input.y:23.6-8: error: useless %destructor for type <*> [-Werror=other] +input.y:23.6-8: error: useless %printer for type <*> [-Werror=other] +./actions.at:1051: $PREPARSER ./input '(x)' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10066,8 +10233,12 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1474: sed 's,.*/$,,' stderr 1>&2 ./actions.at:1051: $PREPARSER ./input '!' stderr: +stderr: +input0.y:30.3-5: error: useless %destructor for type <*> [-Werror=other] +input0.y:30.3-5: error: useless %printer for type <*> [-Werror=other] sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -10076,8 +10247,8 @@ Freeing nterm input (5@0-19) Successful parse. ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error ./actions.at:1051: $PREPARSER ./input '!!!' -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) @@ -10089,7 +10260,10 @@ Freeing nterm input (5@0-29) Successful parse. 
./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1416: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none ./actions.at:1051: $PREPARSER ./input '(y)' +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=error stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -10106,6 +10280,7 @@ ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1051: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10142,11 +10317,7 @@ Freeing token 'y' (13@130-139) Parsing FAILED. ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input.y:23.6-8: error: useless %destructor for type <*> [-Werror=other] -input.y:23.6-8: error: useless %printer for type <*> [-Werror=other] ./actions.at:1051: $PREPARSER ./input '(x)(x)x' -./actions.at:1474: sed 's,.*/$,,' stderr 1>&2 stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10168,11 +10339,14 @@ Freeing token END (7@70-79) Parsing FAILED. ./actions.at:1051: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1233: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y -Wnone,none -Werror --trace=none +338. actions.at:1051: ok +./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none stderr: stdout: ./actions.at:1121: $PREPARSER ./input --debug -338. actions.at:1051: ok + stderr: Starting parse Entering state 0 @@ -10211,40 +10385,92 @@ Cleanup: discarding lookahead token "end of file" (1.5: ) Stack now 0 ./actions.at:1121: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./headers.at:324: echo "x5" >>expout +./actions.at:1237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS 345. 
actions.at:1071: ok -./headers.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x6.c x6.y +349. actions.at:1532: testing Default %printer and %destructor are not for $accept ... +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input0.c input0.y --warnings=none -Werror --trace=none +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +350. actions.at:1596: testing Default %printer and %destructor for midrule values ... +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input0 input0.c $LIBS +stderr: +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +input.y:24.3-4: error: useless %destructor for type <> [-Werror=other] +input.y:24.3-4: error: useless %printer for type <> [-Werror=other] +./actions.at:1582: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +stderr: +input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] +input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] +input.y:33.3-23: error: unset value: $$ [-Werror=other] +input.y:32.3-23: error: unused value: $3 [-Werror=other] +./actions.at:1634: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none +./actions.at:1586: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1634: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none stderr: stdout: -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:1416: $PREPARSER ./input0 --debug +./actions.at:1479: $PREPARSER ./input --debug stderr: Starting parse Entering state 0 Stack now 0 -Reducing stack by rule 1 (line 49): --> $$ = nterm start (1.1: <> for 'S' @ 1) +Reading a token +Next token is token 'a' ('a') +Shifting token 'a' ('a') Entering state 1 Stack now 0 1 Reading a token +Next token is token 'b' ('b') +syntax error +Shifting token error () +Entering state 3 +Stack now 0 1 3 +Next token is token 'b' ('b') +Shifting token 'b' ('b') +Entering state 5 +Stack now 0 1 3 5 +Reading a token +Next token is token "invalid token" () +Error: popping token 'b' ('b') +DESTROY 'b' +Stack now 0 1 3 +Error: popping token error () +Stack now 0 1 +Shifting token error () +Entering state 3 +Stack now 0 1 3 +Next token is token "invalid token" () +Error: discarding token "invalid token" () +Error: popping token error () +Stack now 0 1 +Shifting token error () +Entering state 3 +Stack now 0 1 3 +Reading a token Now at end of input. -Shifting token END (1.1: <> for 'E' @ 1) -Entering state 2 -Stack now 0 1 2 -Stack now 0 1 2 -Cleanup: popping token END (1.1: <> for 'E' @ 1) -Cleanup: popping nterm start (1.1: <> for 'S' @ 1) -stderr: +Cleanup: discarding lookahead token "end of file" () +Stack now 0 1 3 +Cleanup: popping token error () +Cleanup: popping token 'a' ('a') +DESTROY 'a' +./actions.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +348. actions.at:1429: ok +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +351. actions.at:1743: testing @$ in %initial-action implies %locations ... +./actions.at:1743: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: stdout: -./actions.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -349. actions.at:1532: testing Default %printer and %destructor are not for $accept ... -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:1238: $PREPARSER ./input --debug stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -10295,130 +10521,113 @@ Stack now 0 Cleanup: discarding lookahead token "end of file" () Stack now 0 +./actions.at:1743: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stdout: +./actions.at:1416: $PREPARSER ./input0 --debug +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror ./actions.at:1238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input1.c input1.y -350. 
actions.at:1596: testing Default %printer and %destructor for midrule values ... -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1474: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 49): +-> $$ = nterm start (1.1: <> for 'S' @ 1) +Entering state 1 +Stack now 0 1 +Reading a token +Now at end of input. +Shifting token END (1.1: <> for 'E' @ 1) +Entering state 2 +Stack now 0 1 2 +Stack now 0 1 2 +Cleanup: popping token END (1.1: <> for 'E' @ 1) +Cleanup: popping nterm start (1.1: <> for 'S' @ 1) +./actions.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 346. actions.at:1174: ok -./headers.at:325: $CC $CFLAGS $CPPFLAGS -c -o x6.o x6.c -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror - -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Werror +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input1.c input1.y stderr: -input.y:24.3-4: error: useless %destructor for type <> [-Werror=other] -input.y:24.3-4: error: useless %printer for type <> [-Werror=other] -./actions.at:1478: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] + 24 | %printer { #error "<*> printer should not be used" } <*> + | ^~~ +input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] + 24 | %printer { #error "<*> printer should not be used" } <*> + | ^~~ +input.y:33.3-23: error: unset value: $$ [-Werror=other] + 33 | { @$ = 4; } // Only used. + | ^~~~~~~~~~~~~~~~~~~~~ +input.y:32.3-23: error: unused value: $3 [-Werror=other] + 32 | { USE ($$); @$ = 3; } // Only set. + | ^~~~~~~~~~~~~~~~~~~~~ + +./actions.at:1641: sed 's,.*/$,,' stderr 1>&2 +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +352. actions.at:1744: testing @$ in %destructor implies %locations ... +./actions.at:1744: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Werror stdout: -351. actions.at:1743: testing @$ in %initial-action implies %locations ... 
-./actions.at:1743: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:374: $PREPARSER ./input -./actions.at:1582: sed 's,.*/$,,' stderr 1>&2 stderr: 1.1 1.1: syntax error ./actions.at:374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] -input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] -input.y:33.3-23: error: unset value: $$ [-Werror=other] -input.y:32.3-23: error: unused value: $3 [-Werror=other] -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error -stderr: -input1.y:30.3-4: error: useless %destructor for type <> [-Werror=other] -input1.y:30.3-4: error: useless %printer for type <> [-Werror=other] +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Werror 326. actions.at:374: ok -./actions.at:1634: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1417: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1743: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=error - -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=error -352. actions.at:1744: testing @$ in %destructor implies %locations ... 
-./actions.at:1744: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none ./actions.at:1744: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Wnone,none -Werror --trace=none -./actions.at:1582: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none -./actions.at:1586: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=none -Werror --trace=none -./actions.at:1634: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input.c input.y --warnings=none -Werror --trace=none +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none + stderr: -stdout: -./headers.at:325: echo "x6" >>expout -./actions.at:1417: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input1 input1.c $LIBS -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y -./headers.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x7.c x7.y stderr: stdout: -./actions.at:1479: $PREPARSER ./input --debug +stdout: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token 'a' ('a') -Shifting token 'a' ('a') -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token 'b' ('b') -syntax error -Shifting token error () -Entering state 3 -Stack now 0 1 3 -Next token is token 'b' ('b') -Shifting token 'b' ('b') -Entering state 5 -Stack now 0 1 3 5 -Reading a token -Next token is token "invalid token" () -Error: popping token 'b' ('b') -DESTROY 'b' -Stack now 0 1 3 -Error: popping token error () -Stack now 0 1 -Shifting token error () -Entering state 3 -Stack now 0 1 3 -Next token is token "invalid token" () -Error: discarding token "invalid token" () -Error: popping token error () -Stack now 0 1 -Shifting token error () -Entering state 3 -Stack now 0 1 3 -Reading a token -Now at 
end of input. -Cleanup: discarding lookahead token "end of file" () -Stack now 0 1 3 -Cleanup: popping token error () -Cleanup: popping token 'a' ('a') -DESTROY 'a' -./actions.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input1.y:30.3-4: error: useless %destructor for type <> [-Werror=other] +input1.y:30.3-4: error: useless %printer for type <> [-Werror=other] +./actions.at:478: $PREPARSER ./input +349. actions.at:1532: ok +stderr: +./actions.at:1417: sed 's,.*/$,,' stderr 1>&2 +353. actions.at:1745: testing @$ in %printer implies %locations ... +./actions.at:1745: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -348. actions.at:1429: ok stdout: -352. actions.at:1744: ok -./headers.at:326: $CC $CFLAGS $CPPFLAGS -c -o x7.o x7.c -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=error +./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +./headers.at:324: echo "x5" >>expout +333. actions.at:478: ok +./headers.at:325: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x6.c x6.y +./actions.at:1745: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +354. actions.at:1856: testing Qualified $$ in actions: yacc.c ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +355. actions.at:1856: testing Qualified $$ in actions: glr.c ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y -Wnone,none -Werror --trace=none +./headers.at:325: $CC $CFLAGS $CPPFLAGS -c -o x6.o x6.c +./actions.at:1656: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -stdout: stderr: +stdout: +stdout: +./actions.at:1054: $PREPARSER ./input '(x)' ./actions.at:1056: $PREPARSER ./input '(x)' stderr: -stdout: +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: ')' (2@20-29) +line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (0@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. stderr: -354. 
actions.at:1856: testing Qualified $$ in actions: yacc.c ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stdout: -353. actions.at:1745: testing @$ in %printer implies %locations ... -./actions.at:1745: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10431,24 +10640,19 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -349. actions.at:1532: 351. actions.at:1743: ok - ok +./actions.at:1054: $PREPARSER ./input '!' stderr: -input.y:24.57-59: error: useless %destructor for type <*> [-Werror=other] - 24 | %printer { #error "<*> printer should not be used" } <*> - | ^~~ -input.y:24.57-59: error: useless %printer for type <*> [-Werror=other] - 24 | %printer { #error "<*> printer should not be used" } <*> - | ^~~ -input.y:33.3-23: error: unset value: $$ [-Werror=other] - 33 | { @$ = 4; } // Only used. - | ^~~~~~~~~~~~~~~~~~~~~ -input.y:32.3-23: error: unused value: $3 [-Werror=other] - 32 | { USE ($$); @$ = 3; } // Only set. - | ^~~~~~~~~~~~~~~~~~~~~ ./actions.at:1056: $PREPARSER ./input '!' +sending: '!' (0@0-9) +sending: END (1@10-19) +raise (4@9-9): %empty +check-spontaneous-errors (5@9-19): error (@9-19) +Freeing token END (1@10-19) +Freeing nterm input (5@0-19) +Successful parse. stderr: - +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o input1.c input1.y --warnings=none -Werror --trace=none sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -10457,14 +10661,10 @@ Freeing nterm input (5@0-19) Successful parse. ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./actions.at:1641: sed 's,.*/$,,' stderr 1>&2 -./actions.at:1745: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1054: $PREPARSER ./input '!!!' +stderr: ./actions.at:1056: $PREPARSER ./input '!!!' -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error stderr: -355. actions.at:1856: testing Qualified $$ in actions: glr.c ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -10474,11 +10674,23 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +sending: '!' (0@0-9) +sending: '!' (1@10-19) +sending: '!' (2@20-29) +raise (5@10-29): ! (1@20-29) ! (2@20-29) +check-spontaneous-errors (5@10-29): error (@10-29) +sending: END (3@30-39) +Freeing token END (3@30-39) +Freeing nterm input (5@0-29) +Successful parse. 
+stdout: ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./actions.at:1056: $PREPARSER ./input '(y)' -356. actions.at:1856: testing Qualified $$ in actions: lalr1.cc ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +./actions.at:1054: $PREPARSER ./input '(y)' +351. actions.at:1743: ok stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -10493,11 +10705,24 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: +sending: '(' (0@0-9) +sending: 'y' (1@10-19) +10.10-19.18: syntax error, unexpected 'y', expecting 'x' +Freeing token 'y' (1@10-19) +sending: ')' (2@20-29) +line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) +sending: END (3@30-39) +input (0@29-29): /* Nothing */ +input (2@0-29): line (-1@0-29) input (0@29-29) +Freeing token END (3@30-39) +Freeing nterm input (2@0-29) +Successful parse. +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1056: $PREPARSER ./input '(xxxxx)(x)(x)y' +./actions.at:1054: $PREPARSER ./input '(xxxxx)(x)(x)y' +stderr: + stderr: -./actions.at:1053: $PREPARSER ./input '(x)' sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10534,27 +10759,69 @@ Freeing token 'y' (13@130-139) Parsing FAILED. ./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +sending: '(' (0@0-9) +sending: 'x' (1@10-19) +thing (1@10-19): 'x' (1@10-19) +sending: 'x' (2@20-29) +thing (2@20-29): 'x' (2@20-29) +sending: 'x' (3@30-39) +30.30-39.38: syntax error, unexpected 'x', expecting ')' +Freeing nterm thing (2@20-29) +Freeing nterm thing (1@10-19) +Freeing token 'x' (3@30-39) +sending: 'x' (4@40-49) +Freeing token 'x' (4@40-49) +sending: 'x' (5@50-59) +Freeing token 'x' (5@50-59) +sending: ')' (6@60-69) +line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) +sending: '(' (7@70-79) +sending: 'x' (8@80-89) +thing (8@80-89): 'x' (8@80-89) +sending: ')' (9@90-99) +line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) +sending: '(' (10@100-109) +sending: 'x' (11@110-119) +thing (11@110-119): 'x' (11@110-119) +sending: ')' (12@120-129) +line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) +sending: 'y' (13@130-139) +input (0@129-129): /* Nothing */ +input (2@100-129): line (10@100-129) input (0@129-129) +input (2@70-129): line (7@70-99) input (2@100-129) +input (2@0-129): line (-1@0-69) input (2@70-129) +130.130-139.138: syntax error, unexpected 'y', expecting END +Freeing nterm input (2@0-129) +Freeing token 'y' (13@130-139) +Parsing FAILED. +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1417: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input1 input1.c $LIBS +./actions.at:1056: $PREPARSER ./input '(x)(x)x' stderr: +./actions.at:1054: $PREPARSER ./input '(x)(x)x' sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) sending: ')' (2@20-29) line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (0@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. 
-./actions.at:1856: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./actions.at:1056: $PREPARSER ./input '(x)(x)x' +sending: '(' (3@30-39) +sending: 'x' (4@40-49) +thing (4@40-49): 'x' (4@40-49) +sending: ')' (5@50-59) +line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) +sending: 'x' (6@60-69) +thing (6@60-69): 'x' (6@60-69) +sending: END (7@70-79) +70.70-79.78: syntax error, unexpected END, expecting 'x' +Freeing nterm thing (6@60-69) +Freeing nterm line (3@30-59) +Freeing nterm line (0@0-29) +Freeing token END (7@70-79) +Parsing FAILED. +./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./actions.at:1054: $PREPARSER ./input '(x)' -./actions.at:1053: $PREPARSER ./input '!' +356. actions.at:1856: testing Qualified $$ in actions: lalr1.cc ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10574,18 +10841,147 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. -./actions.at:1056: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +341. actions.at:1056: ok +stdout: +340. actions.at:1054: ok +352. actions.at:1744: ok + + stderr: -sending: '!' (0@0-9) -sending: END (1@10-19) -raise (4@9-9): %empty -check-spontaneous-errors (5@9-19): error (@9-19) -Freeing token END (1@10-19) -Freeing nterm input (5@0-19) -Successful parse. -./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stdout: +./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +353. actions.at:1745: ok +357. actions.at:1856: testing Qualified $$ in actions: glr.cc ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +359. actions.at:1863: testing Destroying lookahead assigned by semantic action ... +358. actions.at:1856: testing Qualified $$ in actions: glr2.cc ... +./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y + +./actions.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +360. actions.at:1918: testing YYBACKUP ... 
+./actions.at:1953: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y ./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./headers.at:325: echo "x6" >>expout +./actions.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./actions.at:1856: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +./headers.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x7.c x7.y +stdout: +./actions.at:1657: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 30): +-> $$ = nterm $@1 (: ) +Entering state 2 +Stack now 0 2 +Reducing stack by rule 2 (line 31): +-> $$ = nterm @2 (: 2) +Entering state 4 +Stack now 0 2 4 +Reducing stack by rule 3 (line 32): +-> $$ = nterm @3 (: 3) +Entering state 5 +Stack now 0 2 4 5 +Reducing stack by rule 4 (line 33): +-> $$ = nterm @4 (: 4) +Entering state 6 +Stack now 0 2 4 5 6 +Reading a token +Now at end of input. +syntax error +Error: popping nterm @4 (: 4) +DESTROY 4 +Stack now 0 2 4 5 +Error: popping nterm @3 (: 3) +DESTROY 3 +Stack now 0 2 4 +Error: popping nterm @2 (: 2) +DESTROY 2 +Stack now 0 2 +Error: popping nterm $@1 (: ) +Stack now 0 +Cleanup: discarding lookahead token "end of file" (: ) +Stack now 0 +./actions.at:1657: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +350. actions.at:1596: ok +./actions.at:1954: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +./headers.at:326: $CC $CFLAGS $CPPFLAGS -c -o x7.o x7.c +361. types.at:25: testing %union vs. api.value.type ... +./types.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +stderr: +stdout: +./actions.at:1856: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Stack now 0 1 3 +Reducing stack by rule 1 (line 53): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Stack now 0 1 3 +Reducing stack by rule 1 (line 53): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Stack now 0 2 +Reading a token +Now at end of input. 
+Shifting token "end of file" () +Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed -ne '/ival:/p' stderr +stderr: +stdout: +354. actions.at:1856: ok +./actions.at:1053: $PREPARSER ./input '(x)' +stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10597,23 +10993,11 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -341. actions.at:1056: ok -./actions.at:1053: $PREPARSER ./input '!!!' -./actions.at:1054: $PREPARSER ./input '!' -stderr: -stderr: -./actions.at:1641: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -sending: '!' (0@0-9) -sending: '!' (1@10-19) -sending: '!' (2@20-29) -raise (5@10-29): ! (1@20-29) ! (2@20-29) -check-spontaneous-errors (5@10-29): error (@10-29) -sending: END (3@30-39) -Freeing token END (3@30-39) -Freeing nterm input (5@0-29) -Successful parse. ./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +361. types.at:25: ok +./actions.at:1053: $PREPARSER ./input '!' + +stderr: sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -10621,12 +11005,12 @@ Freeing token END (1@10-19) Freeing nterm input (5@0-19) Successful parse. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1053: $PREPARSER ./input '(y)' -./actions.at:1054: $PREPARSER ./input '!!!' -stderr: +./actions.at:1053: $PREPARSER ./input '!!!' stderr: +362. types.at:44: testing %yacc vs. api.value.type=union ... +./types.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -10636,6 +11020,11 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. +./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +363. types.at:139: testing yacc.c api.value.type={double} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./actions.at:1053: $PREPARSER ./input '(y)' +stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -10648,13 +11037,8 @@ Freeing token END (3@30-39) Freeing nterm input (2@0-29) Successful parse. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -357. actions.at:1856: testing Qualified $$ in actions: glr.cc ... ./actions.at:1053: $PREPARSER ./input '(xxxxx)(x)(x)y' -./actions.at:1054: $PREPARSER ./input '(y)' -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10692,21 +11076,10 @@ Freeing token 'y' (13@130-139) Parsing FAILED. 
./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -sending: '(' (0@0-9) -sending: 'y' (1@10-19) -10.10-19.18: syntax error, unexpected 'y', expecting 'x' -Freeing token 'y' (1@10-19) -sending: ')' (2@20-29) -line (-1@0-29): '(' (0@0-9) error (@10-19) ')' (2@20-29) -sending: END (3@30-39) -input (0@29-29): /* Nothing */ -input (2@0-29): line (-1@0-29) input (0@29-29) -Freeing token END (3@30-39) -Freeing nterm input (2@0-29) -Successful parse. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +362. types.at:44: ok ./actions.at:1053: $PREPARSER ./input '(x)(x)x' -./actions.at:1054: $PREPARSER ./input '(xxxxx)(x)(x)y' +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10727,91 +11100,57 @@ Freeing nterm line (0@0-29) Freeing token END (7@70-79) Parsing FAILED. +stdout: ./actions.at:1053: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./actions.at:1907: $PREPARSER ./input stderr: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: 'x' (2@20-29) -thing (2@20-29): 'x' (2@20-29) -sending: 'x' (3@30-39) -30.30-39.38: syntax error, unexpected 'x', expecting ')' -Freeing nterm thing (2@20-29) -Freeing nterm thing (1@10-19) -Freeing token 'x' (3@30-39) -sending: 'x' (4@40-49) -Freeing token 'x' (4@40-49) -sending: 'x' (5@50-59) -Freeing token 'x' (5@50-59) -sending: ')' (6@60-69) -line (-1@0-69): '(' (0@0-9) error (@10-59) ')' (6@60-69) -sending: '(' (7@70-79) -sending: 'x' (8@80-89) -thing (8@80-89): 'x' (8@80-89) -sending: ')' (9@90-99) -line (7@70-99): '(' (7@70-79) thing (8@80-89) ')' (9@90-99) -sending: '(' (10@100-109) -sending: 'x' (11@110-119) -thing (11@110-119): 'x' (11@110-119) -sending: ')' (12@120-129) -line (10@100-129): '(' (10@100-109) thing (11@110-119) ')' (12@120-129) -sending: 'y' (13@130-139) -input (0@129-129): /* Nothing */ -input (2@100-129): line (10@100-129) input (0@129-129) -input (2@70-129): line (7@70-99) input (2@100-129) -input (2@0-129): line (-1@0-69) input (2@70-129) -130.130-139.138: syntax error, unexpected 'y', expecting END -Freeing nterm input (2@0-129) -Freeing token 'y' (13@130-139) -Parsing FAILED. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +339. actions.at:1053: ok +'b' destructor +'a' destructor +./actions.at:1907: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +359. actions.at:1863: ok stderr: stdout: -./actions.at:1656: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./headers.at:326: echo "x7" >>expout -339. actions.at:1053: ok +364. types.at:139: testing yacc.c api.value.type={double} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y + + ./headers.at:327: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x8.c x8.y -./actions.at:1856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -./actions.at:1054: $PREPARSER ./input '(x)(x)x' -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: shift/reduce conflict on token J [-Wcounterexamples] -time limit exceeded: 6.000000 - First example H i . J K $end - Shift derivation $accept -> [ a -> [ H i -> [ i . J K ] ] $end ] - Second example H i . J $end - Reduce derivation $accept -> [ s -> [ a -> [ H i . 
] J ] $end ] -input.y:4.4-6: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:451: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -stderr: stderr: stdout: -sending: '(' (0@0-9) -sending: 'x' (1@10-19) -thing (1@10-19): 'x' (1@10-19) -sending: ')' (2@20-29) -line (0@0-29): '(' (0@0-9) thing (1@10-19) ')' (2@20-29) -sending: '(' (3@30-39) -sending: 'x' (4@40-49) -thing (4@40-49): 'x' (4@40-49) -sending: ')' (5@50-59) -line (3@30-59): '(' (3@30-39) thing (4@40-49) ')' (5@50-59) -sending: 'x' (6@60-69) -thing (6@60-69): 'x' (6@60-69) -sending: END (7@70-79) -70.70-79.78: syntax error, unexpected END, expecting 'x' -Freeing nterm thing (6@60-69) -Freeing nterm line (3@30-59) -Freeing nterm line (0@0-29) -Freeing token END (7@70-79) -Parsing FAILED. -./actions.at:1054: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -267. counterexample.at:441: ok -./actions.at:1057: $PREPARSER ./input '(x)' +365. types.at:139: testing yacc.c api.value.type={variant} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./actions.at:1417: $PREPARSER ./input1 --debug stderr: +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 49): +-> $$ = nterm start (1.1: <*> for 'S' @ 1) +Entering state 1 +Stack now 0 1 +Reading a token +Now at end of input. +Shifting token END (1.1: <*> for 'E' @ 1) +Entering state 2 +Stack now 0 1 2 +Stack now 0 1 2 +Cleanup: popping token END (1.1: <*> for 'E' @ 1) +Cleanup: popping nterm start (1.1: <*> for 'S' @ 1) +./actions.at:1417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +366. types.at:139: testing yacc.c api.value.type={variant} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +347. actions.at:1307: ok +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./headers.at:327: $CC $CFLAGS $CPPFLAGS -c -o x8.o x8.c stderr: stdout: +./actions.at:1057: $PREPARSER ./input '(x)' stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) @@ -10825,39 +11164,12 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -340. actions.at:1054: ok -353. actions.at:1745: ok -stdout: -stderr: - -./actions.at:478: $PREPARSER ./input -./headers.at:327: $CC $CFLAGS $CPPFLAGS -c -o x8.o x8.c -stderr: -stdout: ./actions.at:1057: $PREPARSER ./input '!' -./actions.at:1417: $PREPARSER ./input1 --debug -358. actions.at:1856: testing Qualified $$ in actions: glr2.cc ... -./actions.at:1856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +367. types.at:139: testing yacc.c api.value.type={struct foo} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: -./actions.at:478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 49): --> $$ = nterm start (1.1: <*> for 'S' @ 1) -Entering state 1 -Stack now 0 1 -Reading a token -Now at end of input. 
-Shifting token END (1.1: <*> for 'E' @ 1) -Entering state 2 -Stack now 0 1 2 -Stack now 0 1 2 -Cleanup: popping token END (1.1: <*> for 'E' @ 1) -Cleanup: popping nterm start (1.1: <*> for 'S' @ 1) - -359. actions.at:1863: testing Destroying lookahead assigned by semantic action ... +stdout: sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -10866,13 +11178,10 @@ Freeing nterm input (5@0-19) Successful parse. ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -333. actions.at:478: ok -./actions.at:1417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1905: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./actions.at:1955: $PREPARSER ./input +stderr: +./actions.at:1955: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1057: $PREPARSER ./input '!!!' -347. actions.at:1307: ok -360. actions.at:1918: testing YYBACKUP ... stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) @@ -10884,13 +11193,9 @@ Freeing nterm input (5@0-29) Successful parse. ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -361. types.at:25: testing %union vs. api.value.type ... -./actions.at:1953: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./types.at:34: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y - -./actions.at:1856: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS - +360. actions.at:1918: ok ./actions.at:1057: $PREPARSER ./input '(y)' +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: sending: '(' (0@0-9) sending: 'y' (1@10-19) @@ -10905,15 +11210,9 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -362. types.at:44: testing %yacc vs. api.value.type=union ... -./types.at:53: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -363. types.at:139: testing yacc.c api.value.type={double} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y + ./actions.at:1057: $PREPARSER ./input '(xxxxx)(x)(x)y' -./actions.at:1954: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -361. types.at:25: ok sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -10950,6 +11249,8 @@ Freeing token 'y' (13@130-139) Parsing FAILED. ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +368. types.at:139: testing yacc.c api.value.type={struct foo} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./actions.at:1057: $PREPARSER ./input '(x)(x)x' stderr: sending: '(' (0@0-9) @@ -10972,414 +11273,320 @@ Freeing token END (7@70-79) Parsing FAILED. ./actions.at:1057: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +342. 
actions.at:1057: ok +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: stderr: stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1856: $PREPARSER ./input --debug -342. actions.at:1057: stderr: - ok stderr: -stdout: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -362. types.at:44: ok Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token UNTYPED (ival: 10, fval: 0.1) Shifting token UNTYPED (ival: 10, fval: 0.1) Entering state 1 -Stack now 0 1 Reading a token Next token is token INT (ival: 20, fval: 0.2) Shifting token INT (ival: 20, fval: 0.2) Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 53): +Reducing stack 0 by rule 1 (line 53): $1 = token UNTYPED (ival: 10, fval: 0.1) $2 = token INT (ival: 20, fval: 0.2) -> $$ = nterm float (ival: 30, fval: 0.3) Entering state 2 -Stack now 0 2 Reading a token Now at end of input. Shifting token "end of file" () Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) ./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1657: $PREPARSER ./input --debug +364. types.at:139: ok stderr: +369. types.at:139: testing yacc.c api.value.type={struct bar} ... stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stdout: Starting parse Entering state 0 -Stack now 0 Reading a token Next token is token UNTYPED (ival: 10, fval: 0.1) Shifting token UNTYPED (ival: 10, fval: 0.1) Entering state 1 -Stack now 0 1 Reading a token Next token is token INT (ival: 20, fval: 0.2) Shifting token INT (ival: 20, fval: 0.2) Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 53): +Reducing stack 0 by rule 1 (line 53): $1 = token UNTYPED (ival: 10, fval: 0.1) $2 = token INT (ival: 20, fval: 0.2) -> $$ = nterm float (ival: 30, fval: 0.3) Entering state 2 -Stack now 0 2 Reading a token Now at end of input. Shifting token "end of file" () Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) ./actions.at:1856: sed -ne '/ival:/p' stderr -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 30): --> $$ = nterm $@1 (: ) -Entering state 2 -Stack now 0 2 -Reducing stack by rule 2 (line 31): --> $$ = nterm @2 (: 2) -Entering state 4 -Stack now 0 2 4 -Reducing stack by rule 3 (line 32): --> $$ = nterm @3 (: 3) -Entering state 5 -Stack now 0 2 4 5 -Reducing stack by rule 4 (line 33): --> $$ = nterm @4 (: 4) -Entering state 6 -Stack now 0 2 4 5 6 -Reading a token -Now at end of input. -syntax error -Error: popping nterm @4 (: 4) -DESTROY 4 -Stack now 0 2 4 5 -Error: popping nterm @3 (: 3) -DESTROY 3 -Stack now 0 2 4 -Error: popping nterm @2 (: 2) -DESTROY 2 -Stack now 0 2 -Error: popping nterm $@1 (: ) -Stack now 0 -Cleanup: discarding lookahead token "end of file" (: ) -Stack now 0 -./actions.at:1657: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -364. types.at:139: testing yacc.c api.value.type={double} %header ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $PREPARSER ./test +stderr: stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +355. actions.at:1856: ok +363. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +stderr: +370. types.at:139: testing yacc.c api.value.type={struct bar} %header ... +366. types.at:139: ok stdout: -354. actions.at:1856: ok +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./headers.at:327: echo "x8" >>expout +stdout: ./headers.at:328: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o x9.cc x9.y -350. actions.at:1596: ok + +./types.at:139: $PREPARSER ./test +stderr: -365. types.at:139: testing yacc.c api.value.type={variant} ... +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +371. types.at:139: testing yacc.c api.value.type={union foo} ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -366. types.at:139: testing yacc.c api.value.type={variant} %header ... +365. types.at:139: ok +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +372. types.at:139: testing yacc.c api.value.type={union foo} %header ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -367. types.at:139: testing yacc.c api.value.type={struct foo} ... +373. types.at:139: testing yacc.c %union { float fval; int ival; }; ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: -368. types.at:139: testing yacc.c api.value.type={struct foo} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stdout: -./headers.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o x9.o x9.cc -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./actions.at:1907: $PREPARSER ./input + +./types.at:139: $PREPARSER ./test stderr: -'b' destructor -'a' destructor -./actions.at:1907: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -359. actions.at:1863: ok +374. types.at:139: testing yacc.c %union { float fval; int ival; }; %header ... +./headers.at:328: $CXX $CPPFLAGS $CXXFLAGS -c -o x9.o x9.cc +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +367. types.at:139: ok ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -369. types.at:139: testing yacc.c api.value.type={struct bar} ... +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +375. 
types.at:139: testing yacc.c %union foo { float fval; int ival; }; ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -363. types.at:139: ok - stderr: stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: stdout: -./actions.at:1955: $PREPARSER ./input -stderr: -370. types.at:139: testing yacc.c api.value.type={struct bar} %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: +stderr: +368. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +371. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./actions.at:1955: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -364. types.at:139: ok -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -360. actions.at:1918: ok stderr: stdout: +369. types.at:139: ok ./actions.at:1856: $PREPARSER ./input --debug - stderr: + Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token UNTYPED (ival: 10, fval: 0.1) Shifting token UNTYPED (ival: 10, fval: 0.1) Entering state 1 +Stack now 0 1 Reading a token Next token is token INT (ival: 20, fval: 0.2) Shifting token INT (ival: 20, fval: 0.2) Entering state 3 -Reducing stack 0 by rule 1 (line 53): +Stack now 0 1 3 +Reducing stack by rule 1 (line 55): $1 = token UNTYPED (ival: 10, fval: 0.1) $2 = token INT (ival: 20, fval: 0.2) -> $$ = nterm float (ival: 30, fval: 0.3) Entering state 2 +Stack now 0 2 Reading a token -Now at end of input. +Next token is token "end of file" () Shifting token "end of file" () Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) ./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +376. types.at:139: testing yacc.c %union foo { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y Starting parse Entering state 0 +Stack now 0 Reading a token Next token is token UNTYPED (ival: 10, fval: 0.1) Shifting token UNTYPED (ival: 10, fval: 0.1) Entering state 1 +Stack now 0 1 Reading a token Next token is token INT (ival: 20, fval: 0.2) Shifting token INT (ival: 20, fval: 0.2) Entering state 3 -Reducing stack 0 by rule 1 (line 53): +Stack now 0 1 3 +Reducing stack by rule 1 (line 55): $1 = token UNTYPED (ival: 10, fval: 0.1) $2 = token INT (ival: 20, fval: 0.2) -> $$ = nterm float (ival: 30, fval: 0.3) Entering state 2 +Stack now 0 2 Reading a token -Now at end of input. +Next token is token "end of file" () Shifting token "end of file" () Entering state 4 +Stack now 0 2 4 +Stack now 0 2 4 Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) ./actions.at:1856: sed -ne '/ival:/p' stderr -371. types.at:139: testing yacc.c api.value.type={union foo} ... -372. 
types.at:139: testing yacc.c api.value.type={union foo} %header ... + +377. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +356. actions.at:1856: ok stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stdout: -355. actions.at:1856: ok ./types.at:139: $PREPARSER ./test +378. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +stderr: stderr: ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -368. types.at:139: ok -stderr: -stderr: -stdout: stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: +374. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: +370. types.at:139: ok stdout: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +379. types.at:139: testing yacc.c api.value.type=union ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -373. types.at:139: testing yacc.c %union { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -369. types.at:139: ok -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -365. types.at:139: ok -366. types.at:139: ok - -374. types.at:139: testing yacc.c %union { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -376. types.at:139: testing yacc.c %union foo { float fval; int ival; }; %header ... -375. types.at:139: testing yacc.c %union foo { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -377. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; ... +380. types.at:139: testing yacc.c api.value.type=union %header ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +372. types.at:139: ok stderr: stdout: ./types.at:139: $PREPARSER ./test +381. types.at:139: testing glr.c api.value.type={double} ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: + ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -367. types.at:139: ok -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +373. types.at:139: ok +382. types.at:139: testing glr.c api.value.type={double} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -378. types.at:139: testing yacc.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... +383. types.at:139: testing glr.c api.value.type={variant} ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -370. types.at:139: ok ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -372. types.at:139: ok - +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +375. types.at:139: ok -379. types.at:139: testing yacc.c api.value.type=union ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -380. types.at:139: testing yacc.c api.value.type=union %header ... +384. types.at:139: testing glr.c api.value.type={variant} %header ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stderr: stdout: stdout: ./types.at:139: $PREPARSER ./test stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: $PREPARSER ./test stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +380. types.at:139: ok +379. types.at:139: ok stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -371. types.at:139: ok ./types.at:139: $PREPARSER ./test -374. types.at:139: ok -stderr: -376. types.at:139: ok -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -373. types.at:139: ok - - stderr: -381. types.at:139: testing glr.c api.value.type={double} ... -383. types.at:139: testing glr.c api.value.type={variant} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -382. types.at:139: testing glr.c api.value.type={double} %header ... +stderr: stdout: -384. types.at:139: testing glr.c api.value.type={variant} %header ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +377. types.at:139: ok stdout: +385. types.at:139: testing glr.c api.value.type={struct foo} ... ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -377. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -375. types.at:139: ok - -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS - -385. types.at:139: testing glr.c api.value.type={struct foo} ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y 386. types.at:139: testing glr.c api.value.type={struct foo} %header ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -380. types.at:139: ok -stdout: -./types.at:139: $PREPARSER ./test -stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 378. types.at:139: ok +376. types.at:139: ok -stderr: + +387. types.at:139: testing glr.c api.value.type={struct bar} ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +388. types.at:139: testing glr.c api.value.type={struct bar} %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +389. types.at:139: testing glr.c api.value.type={union foo} ... 
+./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: -stdout: -./actions.at:1856: $PREPARSER ./input --debug ./actions.at:1856: $PREPARSER ./input --debug +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: - -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token "end of file" () -Shifting token "end of file" () -Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -387. types.at:139: testing glr.c api.value.type={struct bar} ... -stderr: -./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS Starting parse Entering state 0 Reading a token @@ -11402,7 +11609,6 @@ Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) ./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: Starting parse Entering state 0 @@ -11426,121 +11632,22 @@ Cleanup: popping token "end of file" () Cleanup: popping nterm float (ival: 30, fval: 0.3) ./actions.at:1856: sed -ne '/ival:/p' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Stack now 0 1 3 -Reducing stack by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Stack now 0 2 -Reading a token -Next token is token "end of file" () -Shifting token "end of file" () -Entering state 4 -Stack now 0 2 4 -Stack now 0 2 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed -ne '/ival:/p' stderr +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS 357. actions.at:1856: ok -388. types.at:139: testing glr.c api.value.type={struct bar} %header ... -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -356. actions.at:1856: ok -stdout: -./types.at:139: $PREPARSER ./test -stderr: - -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -379. types.at:139: ok -389. types.at:139: testing glr.c api.value.type={union foo} ... 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS 390. types.at:139: testing glr.c api.value.type={union foo} %header ... ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -391. types.at:139: testing glr.c %union { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -stderr: -stdout: -./headers.at:328: echo "x9" >>expout -./headers.at:329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xa.cc xa.y -./headers.at:329: $CXX $CPPFLAGS $CXXFLAGS -c -o xa.o xa.cc -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -384. types.at:139: ok - -392. types.at:139: testing glr.c %union { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: stdout: -stderr: -./types.at:139: $PREPARSER ./test -stdout: -stderr: -./types.at:139: $PREPARSER ./test -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -383. types.at:139: ok -385. types.at:139: ok -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - -381. types.at:139: ok ./types.at:139: $PREPARSER ./test stderr: -390. types.at:139: ok -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -382. types.at:139: ok -./types.at:139: $PREPARSER ./test -stderr: stderr: stdout: ./actions.at:1059: $PREPARSER ./input '(x)' -394. types.at:139: testing glr.c %union foo { float fval; int ival; }; %header ... - - -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +381. types.at:139: ok stderr: -393. types.at:139: testing glr.c %union foo { float fval; int ival; }; ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11553,11 +11660,13 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -386. 
types.at:139: ok - +stderr: +stdout: +./headers.at:328: echo "x9" >>expout +./headers.at:329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xa.cc xa.y ./actions.at:1059: $PREPARSER ./input '!' stderr: -395. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; ... + sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -11566,15 +11675,7 @@ Freeing nterm input (5@0-19) Successful parse. ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -396. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y - -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./actions.at:1059: $PREPARSER ./input '!!!' -397. types.at:139: testing glr.c api.value.type=union ... -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS stderr: sending: '!' (0@0-9) sending: '!' (1@10-19) @@ -11586,11 +11687,11 @@ Freeing nterm input (5@0-29) Successful parse. ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +391. types.at:139: testing glr.c %union { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./actions.at:1059: $PREPARSER ./input '(y)' -398. types.at:139: testing glr.c api.value.type=union %header ... +./headers.at:329: $CXX $CPPFLAGS $CXXFLAGS -c -o xa.o xa.cc stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y sending: '(' (0@0-9) sending: 'y' (1@10-19) 10.10-19.18: syntax error, unexpected 'y', expecting 'x' @@ -11604,9 +11705,10 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./actions.at:1059: $PREPARSER ./input '(xxxxx)(x)(x)y' stderr: +stderr: +stdout: sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11643,10 +11745,13 @@ Freeing token 'y' (13@130-139) Parsing FAILED. ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1059: $PREPARSER ./input '(x)(x)x' stderr: -./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +386. types.at:139: ok sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11668,57 +11773,53 @@ Parsing FAILED. ./actions.at:1059: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 343. actions.at:1059: ok -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -391. 
types.at:139: ok -399. types.at:139: testing lalr1.cc api.value.type={double} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +393. types.at:139: testing glr.c %union foo { float fval; int ival; }; ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +392. types.at:139: testing glr.c %union { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: stdout: -400. types.at:139: testing lalr1.cc api.value.type={double} %header ... +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./types.at:139: $PREPARSER ./test stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -388. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +384. types.at:139: ok +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS -401. types.at:139: testing lalr1.cc api.value.type={variant} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: +stderr: ./types.at:139: $PREPARSER ./test +394. types.at:139: testing glr.c %union foo { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -389. types.at:139: ok +stdout: stderr: stdout: - +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -387. types.at:139: ok -402. types.at:139: testing lalr1.cc api.value.type={variant} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +383. types.at:139: ok +382. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +388. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -403. types.at:139: testing lalr1.cc api.value.type={struct foo} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS + +395. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; ... 
+ +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +396. types.at:139: testing glr.c api.value.union.name=foo; %union { float fval; int ival; }; %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +397. types.at:139: testing glr.c api.value.type=union ... stderr: stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y ./types.at:139: $PREPARSER ./test stderr: stderr: @@ -11726,224 +11827,39 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: -392. types.at:139: ok +390. types.at:139: ok +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -396. types.at:139: ok +389. types.at:139: ok -404. types.at:139: testing lalr1.cc api.value.type={struct foo} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -405. types.at:139: testing lalr1.cc api.value.type={struct bar} ... +398. types.at:139: testing glr.c api.value.type=union %header ... +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.c test.y +399. types.at:139: testing lalr1.cc api.value.type={double} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o test test.c $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: -./headers.at:329: echo "xa" >>expout -./headers.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xb.cc xb.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./headers.at:330: $CXX $CPPFLAGS $CXXFLAGS -c -o xb.o xb.cc stderr: stdout: -./actions.at:1856: $PREPARSER ./input --debug -stderr: -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example: . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: . c A A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 4: %empty . `-> 6: c A A -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: b . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: b . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 6: c A - `-> 4: %empty . -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: c . 
c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: c . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example: b c . A - Shift derivation - a - `-> 1: b d - `-> 6: c . A - Second example: b c . c A A $end - Reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example: b c . c A A $end - First reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 5: a - `-> 1: b d - `-> 3: %empty . `-> 6: c A A - Second example: b c . A $end - Second reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - First example: b c . A - Shift derivation - a - `-> 1: b d - `-> 6: c . A - Second example: b c . A $end - Reduce derivation - $accept - `-> 0: a $end - `-> 1: b d - `-> 5: a - `-> 2: c d - `-> 6: c A - `-> 4: %empty . -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example: b d . - First reduce derivation - a - `-> 1: b d . - Second reduce derivation - a - `-> 1: b d - `-> 7: d . -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example: c d . - First reduce derivation - a - `-> 2: c d . - Second reduce derivation - a - `-> 2: c d - `-> 7: d . -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g;s/ *$//;' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. -Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token UNTYPED (ival: 10, fval: 0.1) -Shifting token UNTYPED (ival: 10, fval: 0.1) -Entering state 1 -Reading a token -Next token is token INT (ival: 20, fval: 0.2) -Shifting token INT (ival: 20, fval: 0.2) -Entering state 3 -Reducing stack 0 by rule 1 (line 55): - $1 = token UNTYPED (ival: 10, fval: 0.1) - $2 = token INT (ival: 20, fval: 0.2) --> $$ = nterm float (ival: 30, fval: 0.3) -Entering state 2 -Reading a token -Now at end of input. 
-Shifting token "end of file" () -Entering state 4 -Cleanup: popping token "end of file" () -Cleanup: popping nterm float (ival: 30, fval: 0.3) -./actions.at:1856: sed -ne '/ival:/p' stderr -stderr: -./counterexample.at:621: YYFLAT=1; export YYFLAT;COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wcounterexamples input.y stdout: -stderr: -358. actions.at:1856: ok ./types.at:139: $PREPARSER ./test -stderr: -stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: stderr: -395. types.at:139: ok -stdout: -./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +385. types.at:139: ok +387. types.at:139: ok + + stderr: -./types.at:139: $PREPARSER ./test stdout: ./actions.at:1060: $PREPARSER ./input '(x)' stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -393. types.at:139: ok -stderr: sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -11956,15 +11872,14 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -406. types.at:139: testing lalr1.cc api.value.type={struct bar} %header ... +401. types.at:139: testing lalr1.cc api.value.type={variant} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +400. types.at:139: testing lalr1.cc api.value.type={double} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -394. types.at:139: ok -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1060: $PREPARSER ./input '!' stderr: -398. types.at:139: - ok sending: '!' (0@0-9) sending: END (1@10-19) raise (4@9-9): %empty @@ -11973,14 +11888,8 @@ Freeing nterm input (5@0-19) Successful parse. ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -stdout: ./actions.at:1060: $PREPARSER ./input '!!!' -./types.at:139: $PREPARSER ./test stderr: -stderr: - sending: '!' (0@0-9) sending: '!' (1@10-19) sending: '!' (2@20-29) @@ -11990,17 +11899,9 @@ Freeing token END (3@30-39) Freeing nterm input (5@0-29) Successful parse. - ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -407. types.at:139: testing lalr1.cc api.value.type={union foo} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -408. types.at:139: testing lalr1.cc api.value.type={union foo} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -397. 
types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./actions.at:1060: $PREPARSER ./input '(y)' stderr: sending: '(' (0@0-9) @@ -12016,15 +11917,10 @@ Freeing nterm input (2@0-29) Successful parse. ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -409. types.at:139: testing lalr1.cc %union { float fval; int ival; }; ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./actions.at:1060: $PREPARSER ./input '(xxxxx)(x)(x)y' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: - -410. types.at:139: testing lalr1.cc %union { float fval; int ival; }; %header ... +stderr: +stdout: sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -12060,12 +11956,56 @@ Freeing nterm input (2@0-129) Freeing token 'y' (13@130-139) Parsing FAILED. -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./actions.at:1856: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Reducing stack 0 by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./actions.at:1060: $PREPARSER ./input '(x)(x)x' stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token UNTYPED (ival: 10, fval: 0.1) +Shifting token UNTYPED (ival: 10, fval: 0.1) +Entering state 1 +Reading a token +Next token is token INT (ival: 20, fval: 0.2) +Shifting token INT (ival: 20, fval: 0.2) +Entering state 3 +Reducing stack 0 by rule 1 (line 55): + $1 = token UNTYPED (ival: 10, fval: 0.1) + $2 = token INT (ival: 20, fval: 0.2) +-> $$ = nterm float (ival: 30, fval: 0.3) +Entering state 2 +Reading a token +Now at end of input. +Shifting token "end of file" () +Entering state 4 +Cleanup: popping token "end of file" () +Cleanup: popping nterm float (ival: 30, fval: 0.3) +./actions.at:1856: sed -ne '/ival:/p' stderr sending: '(' (0@0-9) sending: 'x' (1@10-19) thing (1@10-19): 'x' (1@10-19) @@ -12086,14 +12026,26 @@ Freeing token END (7@70-79) Parsing FAILED. ./actions.at:1060: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -411. types.at:139: testing lalr1.cc api.value.type=union ... +358. actions.at:1856: ok +344. actions.at:1060: ok + +stderr: + +stdout: +402. types.at:139: testing lalr1.cc api.value.type={variant} %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -344. actions.at:1060: ok +./types.at:139: $PREPARSER ./test +stderr: +403. types.at:139: testing lalr1.cc api.value.type={struct foo} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +393. types.at:139: ok ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -412. types.at:139: testing lalr1.cc api.value.type=union %header ... + +404. types.at:139: testing lalr1.cc api.value.type={struct foo} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12102,67 +12054,96 @@ ./types.at:139: $PREPARSER ./test stderr: stderr: -stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: ./types.at:139: $PREPARSER ./test +396. types.at:139: ok stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +395. types.at:139: ok + + +405. types.at:139: testing lalr1.cc api.value.type={struct bar} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +406. types.at:139: testing lalr1.cc api.value.type={struct bar} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: stdout: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: stdout: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +391. types.at:139: ok stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +394. types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test +398. 
types.at:139: ok stderr: + ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: + stdout: + +392. types.at:139: ok +stderr: ./types.at:139: $PREPARSER ./test +stdout: +407. types.at:139: testing lalr1.cc api.value.type={union foo} ... stderr: +./headers.at:329: echo "xa" >>expout +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./headers.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xb.cc xb.y +408. types.at:139: testing lalr1.cc api.value.type={union foo} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +409. types.at:139: testing lalr1.cc %union { float fval; int ival; }; ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +397. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +410. types.at:139: testing lalr1.cc %union { float fval; int ival; }; %header ... +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./headers.at:330: $CXX $CPPFLAGS $CXXFLAGS -c -o xb.o xb.cc +411. types.at:139: testing lalr1.cc api.value.type=union ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./headers.at:330: echo "xb" >>expout -stderr: -./headers.at:331: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xc.cc xc.y -stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./headers.at:331: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xc.o xc.cc ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12171,11 +12152,11 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' @@ -12189,13 +12170,21 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: stderr: +./types.at:139: $PREPARSER ./test stdout: +stderr: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -12211,24 +12200,29 @@ ./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./headers.at:330: echo "xb" >>expout +./headers.at:331: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xc.cc xc.y stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./headers.at:331: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xc.o xc.cc ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -12240,41 +12234,41 @@ stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== 
Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12285,11 +12279,11 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -12297,6 +12291,7 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -12304,38 +12299,90 @@ stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test +stderr: +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -stderr: ./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test 
test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stderr: -stdout: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] +input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example . c A A $end + First reduce derivation $accept -> [ a -> [ b -> [ . ] d -> [ c A A ] ] $end ] + Second example . c A A $end + Second reduce derivation $accept -> [ a -> [ c -> [ . ] d -> [ c A A ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example b . c A A $end + First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] + Second example b . A $end + Second reduce derivation $accept -> [ a -> [ b d -> [ c -> [ . ] A ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example c . c A A $end + First reduce derivation $accept -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] + Second example c . A $end + Second reduce derivation $accept -> [ a -> [ c d -> [ c -> [ . ] A ] ] $end ] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] +time limit exceeded: 6.000000 + First example b c . A + Shift derivation a -> [ b d -> [ c . A ] ] + Second example b c . c A A $end + Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] +input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] + First example b c . c A A $end + First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] + Second example b c . A $end + Second reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] +input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] + First example b c . A + Shift derivation a -> [ b d -> [ c . A ] ] + Second example b c . A $end + Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example b d . + First reduce derivation a -> [ b d . ] + Second reduce derivation a -> [ b d -> [ d . ] ] +input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] + Example c d . + First reduce derivation a -> [ c d . ] + Second reduce derivation a -> [ c d -> [ d . ] ] +input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] +input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] +./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr +270. counterexample.at:610: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +412. types.at:139: testing lalr1.cc api.value.type=union %header ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' @@ -12343,12 +12390,20 @@ stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -12362,34 +12417,50 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== 
Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12397,6 +12468,7 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12405,7 +12477,6 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12413,16 +12484,16 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./headers.at:331: echo "xc" >>expout ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./headers.at:332: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xd.cc xd.y stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: +stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +stderr: stderr: stdout: ======== Testing with C++ standard flags: '' @@ -12431,21 +12502,46 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./headers.at:332: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xd.o xd.cc -stdout: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc 
$LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stdout: +./headers.at:331: echo "xc" >>expout +./headers.at:332: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -d -o xd.cc xd.y +./headers.at:332: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS -c -o xd.o xd.cc +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -12454,24 +12550,39 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12483,7 +12594,6 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: 
./types.at:139: $PREPARSER ./test stderr: @@ -12491,23 +12601,32 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -12527,13 +12646,13 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -12552,28 +12671,44 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: +stderr: +./types.at:139: $PREPARSER ./test +stdout: +stderr: +./types.at:139: $PREPARSER ./test stderr: stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch 
for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -12591,50 +12726,50 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test stderr: -stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: 
+./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12642,18 +12777,33 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +stdout: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' @@ -12704,33 +12854,34 @@ } } ' -- *.hh *.h -./headers.at:387: $CC $CFLAGS $CPPFLAGS -c -o c-only.o c-only.c -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./headers.at:387: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx-only.o cxx-only.cc stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: +./headers.at:387: $CC $CFLAGS $CPPFLAGS -c -o c-only.o c-only.c ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./headers.at:387: $CXX $CPPFLAGS $CXXFLAGS -c -o cxx-only.o cxx-only.cc +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./headers.at:387: $CXX $CXXFLAGS $CPPFLAGS $LDFLAGS c-only.o cxx-only.o -o c-and-cxx || exit 77 -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./headers.at:387: $PREPARSER ./c-and-cxx stderr: -./headers.at:387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./headers.at:392: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o parser x[1-9a-d].o -DCC_IS_CXX=$CC_IS_CXX main.cc $LIBS stderr: +./headers.at:387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test stderr: +./headers.at:392: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o parser x[1-9a-d].o -DCC_IS_CXX=$CC_IS_CXX main.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -12742,39 +12893,23 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stderr: -./types.at:139: $PREPARSER ./test -stdout: -stderr: ./types.at:139: $PREPARSER ./test stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o 
test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12782,6 +12917,7 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12789,59 +12925,58 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./headers.at:394: $PREPARSER ./parser +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./headers.at:394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 310. headers.at:199: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS 413. types.at:139: testing lalr1.cc api.value.type=variant ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stdout: ./types.at:139: $PREPARSER ./test stderr: -stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12859,72 +12994,19 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -input.y: warning: 1 shift/reduce conflict [-Wconflicts-sr] -input.y: warning: 6 reduce/reduce conflicts [-Wconflicts-rr] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example . c A A $end - First reduce derivation $accept -> [ a -> [ b -> [ . 
] d -> [ c A A ] ] $end ] - Second example . c A A $end - Second reduce derivation $accept -> [ a -> [ c -> [ . ] d -> [ c A A ] ] $end ] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example b . c A A $end - First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] - Second example b . A $end - Second reduce derivation $accept -> [ a -> [ b d -> [ c -> [ . ] A ] ] $end ] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example c . c A A $end - First reduce derivation $accept -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] $end ] - Second example c . A $end - Second reduce derivation $accept -> [ a -> [ c d -> [ c -> [ . ] A ] ] $end ] -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] -time limit exceeded: 6.000000 - First example b c . A - Shift derivation a -> [ b d -> [ c . A ] ] - Second example b c . c A A $end - Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] -input.y: warning: reduce/reduce conflict on token A [-Wcounterexamples] - First example b c . c A A $end - First reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ a -> [ b -> [ . ] d -> [ c A A ] ] ] ] ] ] $end ] - Second example b c . A $end - Second reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] -input.y: warning: shift/reduce conflict on token A [-Wcounterexamples] - First example b c . A - Shift derivation a -> [ b d -> [ c . A ] ] - Second example b c . A $end - Reduce derivation $accept -> [ a -> [ b d -> [ a -> [ c d -> [ c -> [ . ] A ] ] ] ] $end ] -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example b d . - First reduce derivation a -> [ b d . ] - Second reduce derivation a -> [ b d -> [ d . ] ] -input.y: warning: reduce/reduce conflict on token $end [-Wcounterexamples] - Example c d . - First reduce derivation a -> [ c d . ] - Second reduce derivation a -> [ c d -> [ d . ] ] -input.y:5.4: warning: rule useless in parser due to conflicts [-Wother] -input.y:6.15: warning: rule useless in parser due to conflicts [-Wother] -./counterexample.at:621: sed -e 's/time limit exceeded: [0-9][.0-9]*/time limit exceeded: XXX/g' stderr -270. counterexample.at:610: ok stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: - ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -414. types.at:139: testing lalr1.cc api.value.type=variant %header ... 
-======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: @@ -12939,23 +13021,26 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +404. types.at:139: ok + +414. types.at:139: testing lalr1.cc api.value.type=variant %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -12964,7 +13049,12 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -400. types.at:139: ok +401. types.at:139: ok + +415. types.at:139: testing lalr1.cc api.value.type=variant ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -12972,286 +13062,249 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -415. types.at:139: testing lalr1.cc api.value.type=variant ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +408. types.at:139: ok + stderr: stdout: +416. 
types.at:139: testing lalr1.cc api.value.type=variant %header ... +./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +399. types.at:139: ok + +417. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stdout: -./types.at:139: $PREPARSER ./test stderr: stdout: -stderr: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -402. types.at:139: ok -399. types.at:139: ok +400. types.at:139: ok + +418. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - stderr: - -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -417. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -416. types.at:139: testing lalr1.cc api.value.type=variant %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test +406. types.at:139: ok stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -408. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +403. types.at:139: ok +stderr: +stdout: + +./types.at:139: $PREPARSER ./test stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +409. 
types.at:139: ok +stderr: stdout: ./types.at:139: $PREPARSER ./test +419. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant ... stderr: +======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -418. types.at:139: testing lalr1.cc api.value.type=variant api.token.constructor %header ... +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + +402. types.at:139: ok +420. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant %header ... ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +421. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + +422. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -409. types.at:139: ok - -419. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant ... 
-======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +405. types.at:139: ok stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -405. types.at:139: ok +410. types.at:139: ok +423. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -420. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant %header ... stderr: -======== Testing with C++ standard flags: '' stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -403. types.at:139: ok - -421. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... +424. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... ======== Testing with C++ standard flags: '' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +407. types.at:139: ok + stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +425. types.at:139: testing glr.cc api.value.type={double} ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test stderr: +411. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + +426. 
types.at:139: testing glr.cc api.value.type={double} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: stderr: ./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -407. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test - stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -401. types.at:139: ok -422. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: - ./types.at:139: $PREPARSER ./test stderr: -423. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor ... 
-======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -stderr: -./types.at:139: $PREPARSER ./test stdout: +./types.at:139: ./check ./types.at:139: $PREPARSER ./test +-std=c++03 not supported +======== Testing with C++ standard flags: '' stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -412. types.at:139: ok stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -406. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -424. types.at:139: testing lalr1.cc %code requires { #include } api.value.type=variant api.token.constructor %header ... -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - -stderr: -stdout: -425. types.at:139: testing glr.cc api.value.type={double} ... -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -410. types.at:139: ok -stderr: - stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -426. types.at:139: testing glr.cc api.value.type={double} %header ... ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -404. 
types.at:139: ok stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -427. types.at:139: testing glr.cc api.value.type={variant} ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++11 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -13261,28 +13314,29 @@ stderr: stdout: ./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check --std=c++11 not supported +-std=c++98 not supported ======== Testing with C++ standard flags: '' +./types.at:139: ./check ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -13290,340 +13344,281 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stderr: -stdout: stdout: ./types.at:139: ./check -./types.at:139: $PREPARSER ./test -std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: 
$CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -stderr: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./types.at:139: ./check stderr: +-std=c++11 not supported +======== Testing with C++ standard flags: '' stdout: -411. types.at:139: ok -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: stderr: - stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stderr: stdout: -./types.at:139: ./check ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check -std=c++03 not supported ======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -428. types.at:139: testing glr.cc api.value.type={variant} %header ... -======== Testing with C++ standard flags: '' +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stderr: stdout: +stdout: ./types.at:139: ./check ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: ./check +-std=c++11 not supported +======== Testing with C++ standard flags: '' -std=c++98 not supported ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: ./check --std=c++11 not supported +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: +stdout: +stderr: ./types.at:139: ./check -std=c++03 not supported ======== Testing with C++ standard flags: '' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +412. 
types.at:139: ok stderr: stdout: + ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stderr: stdout: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./types.at:139: ./check +427. types.at:139: testing glr.cc api.value.type={variant} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $PREPARSER ./test +stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: ./check -stderr: +./types.at:139: $PREPARSER ./test -std=c++11 not supported ======== Testing with C++ standard flags: '' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stderr: -stdout: -stdout: -./types.at:139: ./check -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y --std=c++98 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: -======== Testing with C++ standard flags: '' ./types.at:139: ./check -std=c++03 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS 
-o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stderr: -stdout: stdout: ./types.at:139: ./check -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -std=c++11 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -stderr: -stdout: -stdout: -./types.at:139: ./check -./types.at:139: ./check ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr --std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $PREPARSER ./test stderr: -./types.at:139: ./check -stdout: --std=c++11 not supported +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: ./check stderr: -======== Testing with C++ standard flags: '' stdout: +-std=c++98 not supported +======== Testing with C++ standard flags: '' ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret 
-o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS --std=c++98 not supported +-std=c++03 not supported ======== Testing with C++ standard flags: '' +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stdout: stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: ./types.at:139: ./check -std=c++11 not supported ======== Testing with C++ standard flags: '' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc 
$LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -stdout: -stderr: ./types.at:139: $PREPARSER ./test stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: -./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $PREPARSER ./test stdout: stderr: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check 
check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13631,99 +13626,99 @@ stderr: stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: ./types.at:139: $PREPARSER ./test stderr: +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +stdout: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS 
$CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: +stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -13733,108 +13728,128 @@ ======== Testing with C++ standard flags: '' stdout: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: ./types.at:139: $PREPARSER ./test +stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test stderr: +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: 
+stderr: +stdout: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stderr: +stdout: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check +stderr: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stdout: stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stdout: -./types.at:139: ./check ./types.at:139: $PREPARSER ./test -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test 
stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -13842,164 +13857,161 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test 
stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $PREPARSER ./test stderr: stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; 
NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: ./types.at:139: ./check -stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: stderr: stdout: -./types.at:139: ./check +./types.at:139: sed >&2 
-e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -14010,34 +14022,32 @@ stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -14045,154 +14055,123 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -420. types.at:139: ok -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +424. types.at:139: ok stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -429. types.at:139: testing glr.cc api.value.type={struct foo} ... 
-======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +428. types.at:139: testing glr.cc api.value.type={variant} %header ... +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: +stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: ./check +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +421. types.at:139: ok + +429. types.at:139: testing glr.cc api.value.type={struct foo} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +419. types.at:139: ok + +430. types.at:139: testing glr.cc api.value.type={struct foo} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stderr: -stdout: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -414. types.at:139: ok - stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -stderr: ./types.at:139: $PREPARSER ./test -stdout: stderr: -./types.at:139: $PREPARSER ./test ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +420. 
types.at:139: ok ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -430. types.at:139: testing glr.cc api.value.type={struct foo} %header ... + +431. types.at:139: testing glr.cc api.value.type={struct bar} ... +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test -stderr: -422. types.at:139: stderr: - ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: -418. types.at:139: ok ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +422. types.at:139: ok ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -424. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -431. types.at:139: testing glr.cc api.value.type={struct bar} ... +432. types.at:139: testing glr.cc api.value.type={struct bar} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - - +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -432. types.at:139: testing glr.cc api.value.type={struct bar} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -433. types.at:139: testing glr.cc api.value.type={union foo} ... -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -421. types.at:139: ok ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +417. types.at:139: ok -419. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -434. types.at:139: testing glr.cc api.value.type={union foo} %header ... -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -435. types.at:139: testing glr.cc %union { float fval; int ival; }; ... +433. 
types.at:139: testing glr.cc api.value.type={union foo} ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -413. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: - +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -436. types.at:139: testing glr.cc %union { float fval; int ival; }; %header ... +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -14200,6 +14179,7 @@ stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' @@ -14210,23 +14190,23 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -417. types.at:139: ok stderr: stdout: +414. types.at:139: ok ./types.at:139: $PREPARSER ./test stderr: - ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -423. types.at:139: ok -437. types.at:139: testing glr.cc api.value.type=union ... +413. types.at:139: ok + + +434. types.at:139: testing glr.cc api.value.type={union foo} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -438. types.at:139: testing glr.cc api.value.type=union %header ... +435. types.at:139: testing glr.cc %union { float fval; int ival; }; ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14234,63 +14214,53 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test +423. types.at:139: ok stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: + ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -stderr: +436. types.at:139: testing glr.cc %union { float fval; int ival; }; %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $PREPARSER ./test -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +418. types.at:139: ok + +437. types.at:139: testing glr.cc api.value.type=union ... 
======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: @@ -14304,115 +14274,118 @@ stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +426. types.at:139: ok stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: $PREPARSER ./test + stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +438. types.at:139: testing glr.cc api.value.type=union %header ... +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +415. types.at:139: ok stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -425. 
types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 439. types.at:139: testing glr2.cc api.value.type={double} ... ======== Testing with C++ standard flags: '' -stderr: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +425. types.at:139: ok + +stderr: stderr: stdout: +stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test stderr: stderr: -stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -427. types.at:139: ok ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 440. types.at:139: testing glr2.cc api.value.type={double} %header ... 
======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: ./types.at:139: ./check -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -426. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -441. types.at:139: testing glr2.cc api.value.type={variant} ... -======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +427. types.at:139: ok +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + +441. types.at:139: testing glr2.cc api.value.type={variant} ... 
+stderr: +======== Testing with C++ standard flags: '' +stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' @@ -14423,11 +14396,11 @@ ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -14439,32 +14412,38 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stdout: +stderr: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: $PREPARSER ./test -415. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -442. types.at:139: testing glr2.cc api.value.type={variant} %header ... 
-======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -14474,33 +14453,14 @@ ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -stderr: -./types.at:139: $PREPARSER ./test -stdout: -stderr: -stderr: ./types.at:139: $PREPARSER ./test -stdout: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -428. types.at:139: ok - -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -443. types.at:139: testing glr2.cc api.value.type={struct foo} ... -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14508,7 +14468,6 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -14517,97 +14476,118 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -416. types.at:139: ok - -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -444. types.at:139: testing glr2.cc api.value.type={struct foo} %header ... 
-======== Testing with C++ standard flags: '' -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +stdout: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +416. types.at:139: ok + +442. types.at:139: testing glr2.cc api.value.type={variant} %header ... 
======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: +stderr: ./types.at:139: $PREPARSER ./test +stdout: stderr: +./types.at:139: ./check ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++98 not supported +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' @@ -14615,13 +14595,13 @@ stderr: stdout: ./types.at:139: ./check --std=c++98 not supported +-std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check --std=c++03 not supported +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: @@ -14631,25 +14611,39 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +stderr: +stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test stderr: +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: ./check --std=c++98 not supported +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14659,16 +14653,18 @@ ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: ./check --std=c++03 not supported +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: $CXX 
$CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: @@ -14682,78 +14678,89 @@ stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check --std=c++98 not supported +-std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $PREPARSER ./test stdout: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stderr: stderr: -./types.at:139: ./check ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr --std=c++03 not supported +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' 
./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -14764,92 +14771,68 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stderr: -./types.at:139: $PREPARSER ./test -stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' 
./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $PREPARSER ./test stderr: -stdout: stderr: -./types.at:139: ./check +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: --std=c++03 not supported -======== Testing with C++ standard flags: '' ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +======== Testing with C++ standard flags: '' ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $PREPARSER ./test +stderr: stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +-std=c++98 not supported +======== Testing with C++ standard flags: '' stderr: stdout: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check @@ -14857,41 +14840,50 @@ ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stdout: +./types.at:139: ./check stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stdout: +stderr: +./types.at:139: $PREPARSER ./test +stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: ./check +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: stderr: stdout: ./types.at:139: $PREPARSER ./test +./types.at:139: $PREPARSER ./test +stderr: stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14899,6 +14891,7 @@ 
./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14906,8 +14899,6 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -14921,73 +14912,57 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' +./types.at:139: $PREPARSER ./test stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stderr: +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +======== Testing with C++ standard flags: '' +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: ./types.at:139: $PREPARSER ./test stderr: -stdout: -stdout: -./types.at:139: $PREPARSER ./test -./types.at:139: $PREPARSER ./test ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: +stdout: +./types.at:139: $PREPARSER ./test ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check @@ -15008,11 +14983,11 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -15022,33 +14997,36 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +428. types.at:139: ok + +443. types.at:139: testing glr2.cc api.value.type={struct foo} ... 
======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stderr: -stdout: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: ./check stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y @@ -15056,50 +15034,64 @@ stdout: ./types.at:139: $PREPARSER ./test stderr: +stderr: +stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: stderr: +./types.at:139: ./check stdout: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' +430. types.at:139: ok stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: + ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: ./check +444. types.at:139: testing glr2.cc api.value.type={struct foo} %header ... +======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +429. types.at:139: ok stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -15107,30 +15099,35 @@ ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + +445. types.at:139: testing glr2.cc api.value.type={struct bar} ... +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -432. types.at:139: ok - -445. types.at:139: testing glr2.cc api.value.type={struct bar} ... 
======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -./types.at:139: $PREPARSER ./test stdout: +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: ./check ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -15138,129 +15135,144 @@ stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +438. types.at:139: ok stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +434. types.at:139: ok + +446. types.at:139: testing glr2.cc api.value.type={struct bar} %header ... +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +447. types.at:139: testing glr2.cc api.value.type={union foo} ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -430. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +433. types.at:139: ok stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -446. types.at:139: testing glr2.cc api.value.type={struct bar} %header ... ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +448. types.at:139: testing glr2.cc api.value.type={union foo} %header ... 
======== Testing with C++ standard flags: '' +432. types.at:139: ok +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + +449. types.at:139: testing glr2.cc %union { float fval; int ival; }; ... ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: $PREPARSER ./test stderr: -438. types.at:139: ok -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +431. types.at:139: ok +stderr: stderr: + stdout: stdout: -429. types.at:139: ok -./types.at:139: ./check +./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - - -434. types.at:139: ok -447. types.at:139: testing glr2.cc api.value.type={union foo} ... +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +450. types.at:139: testing glr2.cc %union { float fval; int ival; }; %header ... ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - -448. types.at:139: testing glr2.cc api.value.type={union foo} %header ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -449. types.at:139: testing glr2.cc %union { float fval; int ival; }; ... +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +436. types.at:139: ok + +451. types.at:139: testing glr2.cc api.value.type=union ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stderr: stdout: +stdout: +./types.at:139: ./check ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -436. 
types.at:139: ok ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: stdout: ./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - stderr: stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -450. types.at:139: testing glr2.cc %union { float fval; int ival; }; %header ... +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +435. types.at:139: ok + +452. types.at:139: testing glr2.cc api.value.type=union %header ... ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -======== Testing with C++ standard flags: '' ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -437. types.at:139: ok - -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -451. types.at:139: testing glr2.cc api.value.type=union ... +-std=c++03 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check @@ -15271,125 +15283,124 @@ ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -433. types.at:139: ok - -452. types.at:139: testing glr2.cc api.value.type=union %header ... -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +437. 
types.at:139: ok stderr: stdout: +stderr: +./types.at:139: $PREPARSER ./test +stdout: ./types.at:139: $PREPARSER ./test stderr: + stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -431. types.at:139: ok -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - +======== Testing with C++ standard flags: '' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS 453. types.at:377: testing lalr1.cc: Named %union ... ./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -stderr: -stdout: 453. types.at:377: ok -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stderr: 454. types.at:377: testing glr.cc: Named %union ... ./types.at:377: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -stderr: stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: ./types.at:139: ./check +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -435. types.at:139: ok 454. types.at:377: ok - +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS 455. scanner.at:326: testing Token numbers: yacc.c ... ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -456. scanner.at:326: testing Token numbers: yacc.c api.token.raw ... 
-./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +./scanner.at:326: $PREPARSER ./input +stderr: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./types.at:139: $PREPARSER ./test +455. scanner.at:326: ok stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: stderr: + +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $PREPARSER ./test +456. scanner.at:326: testing Token numbers: yacc.c api.token.raw ... 
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: ./types.at:139: ./check --std=c++03 not supported +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c -./scanner.at:326: $PREPARSER ./input +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c -stderr: -455. scanner.at:326: ok -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./scanner.at:326: $PREPARSER ./input stderr: -stdout: -./types.at:139: ./check ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 456. scanner.at:326: ok - -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS 457. scanner.at:326: testing Token numbers: glr.c ... ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -458. scanner.at:326: testing Token numbers: glr.c api.token.raw ... 
-./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: stdout: @@ -15401,172 +15412,172 @@ stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stdout: -stderr: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: $PREPARSER ./input -stderr: ======== Testing with C++ standard flags: '' -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -458. scanner.at:326: ok - -459. scanner.at:326: testing Token numbers: lalr1.cc ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check -std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c -./scanner.at:326: $PREPARSER ./input +./types.at:139: $PREPARSER ./test stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -457. 
scanner.at:326: ok ./types.at:139: ./check -std=c++03 not supported ======== Testing with C++ standard flags: '' -stderr: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: +stdout: +stderr: +./types.at:139: ./check ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -460. scanner.at:326: testing Token numbers: lalr1.cc api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +-std=c++98 not supported +======== Testing with C++ standard flags: '' ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: ./check stderr: -./types.at:139: $PREPARSER ./test +-std=c++03 not supported +======== Testing with C++ standard flags: '' stdout: -./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +./scanner.at:326: $PREPARSER ./input +stderr: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stdout: +457. scanner.at:326: ok +./types.at:139: $PREPARSER ./test stderr: stderr: stdout: ./types.at:139: ./check -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -std=c++98 not supported ======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +458. scanner.at:326: testing Token numbers: glr.c api.token.raw ... 
+./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stdout: +./types.at:139: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./scanner.at:326: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: ./types.at:139: ./check -./types.at:139: ./check -std=c++03 not supported ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: ./types.at:139: ./check --std=c++98 not supported +stderr: +-std=c++03 not supported ======== Testing with C++ standard flags: '' +stdout: +./types.at:139: ./check ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: --std=c++98 not supported -======== Testing with C++ standard flags: '' stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $PREPARSER ./test +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check --std=c++03 not supported +-std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -stderr: ./types.at:139: ./check -stdout: -std=c++03 not supported ======== Testing with C++ standard flags: '' -./types.at:139: $PREPARSER ./test -stderr: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS 
$CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./types.at:139: ./check ======== Testing with C++ standard flags: '' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: -./types.at:139: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -stdout: ./types.at:139: ./check -std=c++98 not supported ======== Testing with C++ standard flags: '' @@ -15579,8 +15590,9 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: ./check --std=c++03 not supported +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: @@ -15590,67 +15602,44 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +stderr: ./types.at:139: ./check +stdout: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.c +./scanner.at:326: $PREPARSER ./input stderr: -stdout: +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +458. scanner.at:326: ok +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + stderr: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc stdout: ./types.at:139: $PREPARSER ./test stderr: -./scanner.at:326: $PREPARSER ./input -stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -======== Testing with C++ standard flags: '' -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -459. scanner.at:326: ok -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - -461. scanner.at:326: testing Token numbers: glr.cc ... +459. scanner.at:326: testing Token numbers: lalr1.cc ... 
./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: -./types.at:139: ./check --std=c++98 not supported ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: ./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./scanner.at:326: $PREPARSER ./input -stderr: stderr: stdout: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $PREPARSER ./test stderr: -460. scanner.at:326: ok stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' --std=c++03 not supported -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -462. scanner.at:326: testing Token numbers: glr.cc api.token.raw ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./types.at:139: ./check stdout: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -15672,31 +15661,20 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./scanner.at:326: $PREPARSER ./input -stderr: ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -461. scanner.at:326: stderr: - ok -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - -463. 
scanner.at:326: testing Token numbers: glr2.cc ... -./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: @@ -15707,45 +15685,33 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: +stderr: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stdout: -./types.at:139: $PREPARSER ./test -stderr: -stderr: -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc ./scanner.at:326: $PREPARSER ./input -======== Testing with C++ standard flags: '' stderr: ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -462. scanner.at:326: ok +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +459. scanner.at:326: ok -464. scanner.at:326: testing Token numbers: glr2.cc api.token.raw ... +460. scanner.at:326: testing Token numbers: lalr1.cc api.token.raw ... 
./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -stderr: -stdout: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ======== Testing with C++ standard flags: '' ./types.at:139: ./check @@ -15766,10 +15732,10 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -stderr: ./types.at:139: ./check -stdout: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -15778,57 +15744,90 @@ ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check 
check.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: +stderr: +======== Testing with C++ standard flags: '' stdout: +stdout: +./types.at:139: $PREPARSER ./test ./types.at:139: $PREPARSER ./test stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' +stderr: +439. types.at:139: ok +stdout: ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +./scanner.at:326: $PREPARSER ./input +stderr: + +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +461. scanner.at:326: testing Token numbers: glr.cc ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +460. scanner.at:326: ok + stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: +462. scanner.at:326: testing Token numbers: glr.cc api.token.raw ... 
stdout: +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./scanner.at:326: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test @@ -15837,95 +15836,175 @@ ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: -stderr: stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stdout: ./types.at:139: $PREPARSER ./test -./types.at:139: ./check stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +./scanner.at:326: $PREPARSER ./input +stderr: stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +462. scanner.at:326: ok stderr: stdout: ./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc ./scanner.at:326: $PREPARSER ./input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: stderr: +stdout: ./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -463. scanner.at:326: ok +./types.at:139: $PREPARSER ./test +461. scanner.at:326: ok +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +440. types.at:139: ok +463. scanner.at:326: testing Token numbers: glr2.cc ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y + + +464. scanner.at:326: testing Token numbers: glr2.cc api.token.raw ... +./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y 465. scanner.at:326: testing Token numbers: lalr1.d ... ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.d input.y +./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS 465. 
scanner.at:326: skipped (scanner.at:326) +./scanner.at:326: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' 466. scanner.at:326: testing Token numbers: lalr1.d api.token.raw ... ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.d input.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test 466. scanner.at:326: skipped (scanner.at:326) +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: 467. scanner.at:326: testing Token numbers: lalr1.java ... ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 467. scanner.at:326: skipped (scanner.at:326) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 468. scanner.at:326: testing Token numbers: lalr1.java api.token.raw ... ./scanner.at:326: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.java input.y -468. scanner.at:326: skipped (scanner.at:326) +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +468. scanner.at:326: ======== Testing with C++ standard flags: '' + skipped (scanner.at:326) +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS 469. scanner.at:330: testing Token numbers: lalr1.cc api.token.raw api.value.type=variant api.token.constructor ... ./scanner.at:330: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./scanner.at:326: $PREPARSER ./input +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./scanner.at:330: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -464. 
scanner.at:326: ok - +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -470. calc.at:1334: testing Calculator parse.trace ... -./calc.at:1334: mv calc.y.tmp calc.y - ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1334: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: stderr: stdout: ./types.at:139: $PREPARSER ./test -stdout: stderr: +stderr: +stdout: ./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ======== Testing with C++ standard flags: '' ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +441. types.at:139: ok + +470. calc.at:1334: testing Calculator parse.trace ... 
+./calc.at:1334: mv calc.y.tmp calc.y + +./calc.at:1334: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1334: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: ./check ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS @@ -17996,9 +18075,6 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stdout: -./types.at:139: $PREPARSER ./test input: | 1 2 ./calc.at:1334: $PREPARSER ./calc input @@ -18025,7 +18101,8 @@ Cleanup: discarding lookahead token "number" (1.1: 2) Stack now 0 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./scanner.at:330: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc stderr: Starting parse Entering state 0 @@ -18047,7 +18124,9 @@ Stack now 0 Cleanup: discarding lookahead token "number" (1.1: 2) Stack now 0 -440. types.at:139: ok +./scanner.at:330: $PREPARSER ./input +stderr: +./scanner.at:330: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18058,13 +18137,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - +469. scanner.at:330: ok ./calc.at:1334: cat stderr input: | 1//2 ./calc.at:1334: $PREPARSER ./calc input + stderr: -471. calc.at:1336: testing Calculator %header ... Starting parse Entering state 0 Stack now 0 @@ -18093,12 +18172,7 @@ Cleanup: discarding lookahead token '/' (1.1: ) Stack now 0 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: mv calc.y.tmp calc.y - stderr: -stderr: -stdout: -./types.at:139: $PREPARSER ./test Starting parse Entering state 0 Stack now 0 @@ -18126,9 +18200,9 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.1: ) Stack now 0 -./calc.at:1336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +471. calc.at:1336: testing Calculator %header ... 
+./calc.at:1336: mv calc.y.tmp calc.y + ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18139,15 +18213,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -======== Testing with C++ standard flags: '' +./calc.at:1336: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y ./calc.at:1334: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./types.at:139: $PREPARSER ./test input: | error -stderr: ./calc.at:1334: $PREPARSER ./calc input stderr: Starting parse @@ -18159,7 +18228,6 @@ Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -18169,9 +18237,6 @@ syntax error Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 -======== Testing with C++ standard flags: '' -./calc.at:1336: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18182,20 +18247,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1336: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS ./calc.at:1334: cat stderr -stderr: -stdout: -stderr: -stdout: -./types.at:139: ./check -./types.at:139: $PREPARSER ./test input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: | 1 = 2 = 3 ./calc.at:1334: $PREPARSER ./calc input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -18237,7 +18294,6 @@ Stack now 0 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: Starting parse Entering state 0 Stack now 0 @@ -18277,11 +18333,6 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.1: ) Stack now 0 -stdout: -439. types.at:139: ok -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18292,14 +18343,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1334: cat stderr -472. calc.at:1337: testing Calculator %debug %locations ... 
-./calc.at:1337: mv calc.y.tmp calc.y - input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y | | +1 ./calc.at:1334: $PREPARSER ./calc input @@ -18356,7 +18401,6 @@ Stack now 0 Cleanup: discarding lookahead token '+' (1.1: ) Stack now 0 -stderr: ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18367,15 +18411,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: stdout: ./types.at:139: $PREPARSER ./test stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1334: cat stderr -======== Testing with C++ standard flags: '' +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1334: $PREPARSER ./calc /dev/null +442. types.at:139: ok stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS Starting parse Entering state 0 Stack now 0 @@ -18385,7 +18429,6 @@ Cleanup: discarding lookahead token "end of input" (1.1: ) Stack now 0 ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: Starting parse Entering state 0 @@ -18395,10 +18438,7 @@ syntax error Cleanup: discarding lookahead token "end of input" (1.1: ) Stack now 0 -stderr: -stdout: -./types.at:139: $PREPARSER ./test -stderr: + ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -18409,19 +18449,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -stdout: -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y ./calc.at:1334: cat stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +472. calc.at:1337: testing Calculator %debug %locations ... +./calc.at:1337: mv calc.y.tmp calc.y + +./calc.at:1337: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1334: $PREPARSER ./calc input stderr: -stderr: Starting parse Entering state 0 Stack now 0 @@ -18740,8 +18776,6 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1336: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: Starting parse Entering state 0 @@ -19060,19 +19094,6 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1336: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -input: ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -19083,30 +19104,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1336: $PREPARSER ./calc input -stderr: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: ./calc.at:1334: cat stderr -stderr: -./types.at:139: ./check -./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y input: +./calc.at:1337: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS | (!!) + (1 2) = 1 ./calc.at:1334: $PREPARSER ./calc input stderr: @@ -19252,7 +19252,6 @@ Cleanup: popping nterm input (1.1: ) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -19393,11 +19392,9 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) - | 1 2 -./calc.at:1336: $PREPARSER ./calc input stderr: -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1336: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -19408,25 +19405,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error +./calc.at:1336: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +input: ./calc.at:1334: cat stderr -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS input: | (- *) + (1 2) = 1 ./calc.at:1334: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1336: $PREPARSER ./calc input +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1336: cat stderr Starting parse Entering state 0 Stack now 0 @@ -19577,7 +19588,7 @@ Cleanup: popping nterm input (1.1: ) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +stderr: Starting parse Entering state 0 Stack now 0 @@ -19726,15 +19737,9 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) - | 1//2 -./calc.at:1336: $PREPARSER ./calc input -stderr: -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -syntax error -stdout: +input: ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -19745,27 +19750,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $PREPARSER ./test + | 1 2 +./calc.at:1336: $PREPARSER ./calc input +stdout: +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./scanner.at:326: $PREPARSER ./input ./calc.at:1334: cat stderr -======== Testing with C++ standard flags: '' +stderr: +stderr: +syntax error +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS | (* *) + (*) + (*) -./calc.at:1336: cat stderr ./calc.at:1334: $PREPARSER ./calc input stderr: +463. scanner.at:326: ok Starting parse Entering state 0 Stack now 0 @@ -19918,10 +19920,16 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | error -./calc.at:1336: $PREPARSER ./calc input -stderr: +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -20074,10 +20082,8 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error + +./calc.at:1336: cat stderr ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -20088,19 +20094,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: + | 1//2 +./calc.at:1336: $PREPARSER ./calc input +stderr: ./calc.at:1334: cat stderr +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +473. calc.at:1338: testing Calculator %locations api.location.type={Span} ... +./calc.at:1338: mv calc.y.tmp calc.y + +stderr: +syntax error input: -./calc.at:1336: cat stderr +./calc.at:1338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y | 1 + 2 * 3 + !+ ++ ./calc.at:1334: $PREPARSER ./calc input stderr: @@ -20185,13 +20192,16 @@ Cleanup: popping token '+' (1.1: ) Cleanup: popping nterm exp (1.1: 7) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 = 2 = 3 -./calc.at:1336: $PREPARSER ./calc input -stderr: -stdout: -./types.at:139: ./check +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -20274,14 +20284,11 @@ Cleanup: popping token '+' (1.1: ) Cleanup: popping nterm exp (1.1: 7) ./calc.at:1334: $EGREP -c -v 'Return for a new token:|LAC:' stderr -syntax error -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error +./calc.at:1336: cat stderr input: | 1 + 2 * 3 + !- ++ ./calc.at:1334: $PREPARSER ./calc input +input: stderr: Starting parse Entering state 0 @@ -20363,9 +20370,112 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.1: ) Cleanup: popping nterm exp (1.1: 7) +stderr: + | error ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1336: $PREPARSER ./calc input +stdout: +stderr: +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./scanner.at:326: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' 
(1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) stderr: +syntax error +./scanner.at:326: $PREPARSER ./input stderr: +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./scanner.at:326: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -20376,6 +20486,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +464. scanner.at:326: ok +stdout: +./types.at:139: $PREPARSER ./test +stderr: +./calc.at:1334: cat stderr +stderr: +./calc.at:1336: cat stderr +stdout: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./types.at:139: $PREPARSER ./test + | 1 + 2 * 3 + !* ++ +./calc.at:1334: $PREPARSER ./calc input +stderr: +======== Testing with C++ standard flags: '' +stderr: +input: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -20446,18 +20575,403 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + | 1 = 2 = 3 +./calc.at:1336: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +======== Testing with C++ standard flags: '' +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering 
state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.1: ) Cleanup: popping nterm exp (1.1: 7) +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +syntax error +./calc.at:1334: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +474. calc.at:1340: testing Calculator %name-prefix "calc" ... +./calc.at:1340: mv calc.y.tmp calc.y + +./calc.at:1340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1334: cat stderr +./calc.at:1336: cat stderr +input: + | (#) + (#) = 2222 +./calc.at:1334: $PREPARSER ./calc input +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm 
exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1336: $PREPARSER ./calc input +stderr: +stderr: +syntax error +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 
1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: stdout: +syntax error ./calc.at:1337: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: +stdout: ./calc.at:1337: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -20469,8 +20983,6 @@ || /\t/ )' calc.c -./calc.at:1336: cat stderr -input: ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -20481,6 +20993,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: ./check +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -20495,12 +21020,11 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1337: $PREPARSER ./calc input -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1334: cat stderr +./calc.at:1340: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: - | - | +1 -./calc.at:1336: $PREPARSER ./calc input +./calc.at:1336: cat stderr +input: Starting parse Entering state 0 Stack now 0 @@ -21518,12 +22042,114 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) + | (1 + #) = 1111 +./calc.at:1334: $PREPARSER ./calc input ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: cat stderr +stderr: +./calc.at:1336: $PREPARSER ./calc /dev/null +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error ./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Stack now 0 @@ -22542,337 +23168,39 @@ Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: stderr: - | 1 + 2 * 3 + !* ++ -syntax error -./calc.at:1334: $PREPARSER ./calc input -input: stderr: - | 1 2 -./calc.at:1337: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' 
(1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) stderr: -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' 
(1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: cat stderr -./calc.at:1334: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1336: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1337: cat stderr -syntax error -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: cat stderr -input: -stderr: - | 1//2 -./calc.at:1337: $PREPARSER ./calc input -syntax error -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 -./calc.at:1334: $PREPARSER ./calc input -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Stack now 0 4 12 21 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error -Error: popping token '/' (1.2: ) -Stack now 0 8 +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stdout: -Starting parse -Entering state 0 -Stack now 0 
-Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 Stack now 0 4 -Reading a token -syntax error: invalid character: '#' Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 @@ -22896,69 +23224,27 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 1111) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -22966,7 +23252,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -22984,8 +23270,14 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./types.at:139: $PREPARSER ./test +input: +stderr: + | 1 2 +./calc.at:1337: $PREPARSER ./calc input +stderr: ./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -22996,9 +23288,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error +Error: popping nterm exp 
(1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -23008,132 +23318,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +======== Testing with C++ standard flags: '' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 
0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -441. types.at:139: ok +Next token is token "number" (1.3: 2) +1.3: syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 +./calc.at:1334: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1336: cat stderr -./calc.at:1337: cat stderr -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -23144,49 +23357,12 @@ }eg ' expout || exit 77 input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1336: $PREPARSER ./calc input -input: - | error -./calc.at:1337: $PREPARSER ./calc input -stderr: - -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1334: cat stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -input: - | (1 + #) = 1111 + | (# + 1) = 1111 ./calc.at:1334: $PREPARSER ./calc input +input: stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1336: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -23196,26 +23372,7 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 @@ -23227,6 +23384,22 @@ Entering state 11 Stack now 0 4 11 Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 @@ -23287,32 +23460,13 @@ Cleanup: popping nterm input (1.1: ) stderr: ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr -./calc.at:1337: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -473. calc.at:1338: testing Calculator %locations api.location.type={Span} ... -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: $PREPARSER ./test -./calc.at:1338: mv calc.y.tmp calc.y - Starting parse Entering state 0 Stack now 0 @@ -23322,26 +23476,7 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 @@ -23353,6 +23488,22 @@ Entering state 11 Stack now 0 4 11 Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 @@ -23411,15 +23562,14 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -stderr: -./calc.at:1338: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -stdout: -./scanner.at:330: $EGREP -c 'yytranslate\[\]|translate_table\[\]|translate_table =|translate_table_ =' input.cc -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1337: cat stderr -./calc.at:1336: cat stderr -./scanner.at:330: $PREPARSER ./input +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +input: ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -23430,15 +23580,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -======== Testing with C++ standard flags: '' -stderr: -input: - | 1 = 2 = 3 + | 1//2 ./calc.at:1337: $PREPARSER ./calc input -input: +./calc.at:1334: cat stderr stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./scanner.at:330: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -23453,41 +23598,31 
@@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error +Error: popping token '/' (1.2: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 - | (!!) + (1 2) = 1 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1334: cat stderr -stderr: -469. scanner.at:330: ok -syntax error -error: 2222 != 1 -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -23502,38 +23637,25 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error +Error: popping token '/' (1.2: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -stderr: input: -syntax error -error: 2222 != 1 - | (# + 1) = 1111 + | (1 + # + 1) = 1111 ./calc.at:1334: $PREPARSER ./calc input stderr: +./calc.at:1336: cat stderr +input: ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -23553,7 +23675,26 @@ Entering state 4 Stack now 0 4 Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 @@ -23639,9 +23780,14 @@ Stack now 0 6 17 Cleanup: popping token 
"end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) - ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1336: $PREPARSER ./calc input +stderr: stderr: +syntax error +error: 2222 != 1 +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -23651,7 +23797,26 @@ Entering state 4 Stack now 0 4 Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 @@ -23737,8 +23902,43 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) +stderr: +stdout: +stderr: ./calc.at:1337: cat stderr -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1338: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +syntax error +error: 2222 != 1 +input: +stderr: +./calc.at:1338: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + + | error +stdout: +./calc.at:1337: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -23748,42 +23948,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 | - | +1 -./calc.at:1337: $PREPARSER ./calc input + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1336: cat stderr stderr: -474. calc.at:1340: testing Calculator %name-prefix "calc" ... 
-./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -23793,47 +23975,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./calc.at:1340: mv calc.y.tmp calc.y - -stderr: -./types.at:139: ./check -./calc.at:1340: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Next token is token "invalid token" (1.1: ) +1.1: syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - | (- *) + (1 2) = 1 -./calc.at:1336: $PREPARSER ./calc input +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stderr: -syntax error -syntax error -error: 2222 != 1 +stderr: +./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1334: cat stderr -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: ./check +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -23844,26 +24002,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1337: cat stderr -syntax error -syntax error -error: 2222 != 1 +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1336: cat stderr input: - | (1 + # + 1) = 1111 -./calc.at:1337: $PREPARSER ./calc /dev/null + | (1 + 1) / (1 - 1) ./calc.at:1334: $PREPARSER ./calc input stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Stack now 0 @@ -23888,71 +24033,98 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) + $2 = nterm exp (1.1: 2) $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by 
rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -23960,7 +24132,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) + $1 = nterm exp (1.1: 2) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -23979,26 +24151,22 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1338: $PREPARSER ./calc input +input: + | (- *) + (1 2) = 1 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1337: cat stderr +stderr: +1.3: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1336: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -24023,79 +24191,106 @@ Entering state 21 Stack now 0 4 12 21 Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token Next token is token ')' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): $1 = token '(' 
(1.1: ) - $2 = token error (1.1: ) + $2 = nterm exp (1.1: 2) $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -24113,7 +24308,106 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1337: "$PERL" -pi -e 'use strict; +input: +stderr: +1.3: syntax error +syntax error +syntax error +error: 2222 != 1 + | 1 = 2 = 3 +./calc.at:1337: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.5: 2) 
+Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24123,9 +24417,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1340: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1336: cat stderr ./calc.at:1334: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -24136,20 +24427,267 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1336: cat stderr +./calc.at:1338: cat stderr +./calc.at:1334: cat stderr input: input: +./calc.at:1337: cat stderr | (* *) + (*) + (*) -./calc.at:1334: cat stderr ./calc.at:1336: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +470. 
calc.at:1334: ok + | 1//2 +./calc.at:1338: $PREPARSER ./calc input +stderr: +stderr: +syntax error +syntax error +syntax error +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +input: + | + | +1 +./types.at:139: ./check ./calc.at:1337: $PREPARSER ./calc input stderr: stderr: +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +syntax error syntax error syntax error +1.3: syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stdout: +./calc.at:1340: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1340: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +./calc.at:1338: cat stderr +input: +./calc.at:1336: cat stderr +475. calc.at:1341: testing Calculator %verbose ... 
+ | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1337: cat stderr +./calc.at:1341: mv calc.y.tmp calc.y + +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS + | error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1337: $PREPARSER ./calc /dev/null +input: +stderr: +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1336: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +1.1: syntax error +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +input: + | 1 2 +./calc.at:1340: $PREPARSER ./calc input +1.1: syntax error +stderr: +stderr: +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1341: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: syntax error +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1338: cat stderr +stderr: +./calc.at:1337: cat stderr ./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 = 2 = 3 +./calc.at:1338: $PREPARSER ./calc input +stderr: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +./calc.at:1337: $PREPARSER ./calc input +1.7: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -24468,8 +25006,9 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: cat stderr stderr: -input: +1.7: syntax error stderr: Starting parse Entering state 0 @@ -24788,13 +25327,9 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + 1) / (1 - 1) -syntax error -syntax error -syntax error -./calc.at:1334: $PREPARSER ./calc input -stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; +input: + | 1//2 +./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24804,150 +25339,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing 
stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1334: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -24957,153 +25349,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 
-Stack now 0 8 23 4 12 20 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1337: cat stderr -input: -./calc.at:1336: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1334: "$PERL" -pi -e 'use strict; +./calc.at:1340: $PREPARSER ./calc input +stderr: +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -25113,7 +25361,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1338: cat stderr +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1336: cat stderr +stderr: +./calc.at:1337: cat stderr +syntax error +input: + | + | +1 +./calc.at:1338: $PREPARSER ./calc input +stderr: +input: +input: +2.1: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ + | (!!) + (1 2) = 1 +./calc.at:1336: $PREPARSER ./calc input +./calc.at:1337: $PREPARSER ./calc input +stderr: stderr: +stderr: +memory exhausted +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error Starting parse Entering state 0 Stack now 0 @@ -25254,12 +25527,30 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1336: $PREPARSER ./calc input stderr: -./calc.at:1334: cat stderr +memory exhausted +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 @@ -25400,9 +25691,8 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1340: cat stderr stderr: -470. calc.at:1334: ok -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -25413,18 +25703,59 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +stdout: +./calc.at:1338: cat stderr + | error +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $PREPARSER ./test +stderr: +stderr: +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: $PREPARSER ./calc /dev/null +stderr: +======== Testing with C++ standard flags: '' +./calc.at:1336: cat stderr +1.1: syntax error stderr: -./calc.at:1336: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1337: cat stderr +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error input: - - | 1 + 2 * 3 + !- ++ -./calc.at:1336: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: input: + | (#) + (#) = 2222 +./calc.at:1336: $PREPARSER ./calc input +1.1: syntax error | (- *) + (1 2) = 1 +stderr: ./calc.at:1337: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' ./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -25574,8 +25905,20 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -25725,9 +26068,8 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -475. calc.at:1341: testing Calculator %verbose ... -./calc.at:1341: mv calc.y.tmp calc.y - +./calc.at:1340: cat stderr +./calc.at:1338: cat stderr ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -25738,6 +26080,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: ./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -25748,14 +26091,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1341: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | 1 = 2 = 3 +./calc.at:1340: $PREPARSER ./calc input +stderr: +input: +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1338: $PREPARSER ./calc input ./calc.at:1337: cat stderr +stderr: +stderr: ./calc.at:1336: cat stderr +syntax error +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | (* *) + (*) + (*) ./calc.at:1337: $PREPARSER ./calc input stderr: input: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: + | (1 + #) = 1111 +./calc.at:1336: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -25908,12 +26275,32 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1336: $PREPARSER ./calc input stderr: stderr: -memory exhausted +syntax error: invalid character: '#' ./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -26065,8 +26452,12 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: -memory exhausted +syntax error: invalid character: '#' +./calc.at:1340: cat stderr +./calc.at:1338: cat stderr ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -26077,9 +26468,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stdout: -./calc.at:1341: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +input: + | (!!) + (1 2) = 1 +input: +./calc.at:1338: $PREPARSER ./calc input ./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -26090,40 +26482,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' + | + | +1 +./calc.at:1340: $PREPARSER ./calc input +stderr: +stderr: +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1337: cat stderr -./calc.at:1338: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -input: +stderr: +syntax error +stderr: ./calc.at:1336: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS | 1 + 2 * 3 + !+ ++ ./calc.at:1337: $PREPARSER ./calc input -input: stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 input: -./calc.at:1338: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -26204,15 +26585,31 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1338: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 + | (# + 1) = 1111 ./calc.at:1336: $PREPARSER ./calc input -stderr: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -26293,32 +26690,21 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1337: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stderr: -./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stdout: syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1340: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr +./calc.at:1340: cat stderr input: +stderr: +syntax error: invalid character: '#' | 1 + 2 * 3 + !- ++ -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1340: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - input: +./calc.at:1337: $PREPARSER ./calc input stderr: +./calc.at:1340: $PREPARSER ./calc /dev/null + | (- *) + (1 2) = 1 +./calc.at:1338: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -26399,12 +26785,16 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +stderr: ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1338: $PREPARSER ./calc input +syntax error stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: ./calc.at:1336: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -26415,6 +26805,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 @@ -26495,22 +26886,11 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -1.3: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1340: $PREPARSER ./calc input +syntax error +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -26521,14 +26901,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error ./calc.at:1336: cat stderr -stderr: -./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1337: cat stderr ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -26540,19 +26913,36 @@ }eg ' expout || exit 77 input: -input: - | (1 + #) = 1111 +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + 
($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + # + 1) = 1111 ./calc.at:1336: $PREPARSER ./calc input - | 1 2 -input: -./calc.at:1340: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -./calc.at:1337: $PREPARSER ./calc input stderr: +./calc.at:1337: cat stderr +./calc.at:1338: cat stderr +syntax error: invalid character: '#' +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: stderr: syntax error: invalid character: '#' -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1340: cat stderr +./calc.at:1337: $PREPARSER ./calc input +input: +stdout: + | (* *) + (*) + (*) +./calc.at:1341: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1338: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -26634,13 +27024,43 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +stderr: ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: cat stderr -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1341: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1340: $PREPARSER ./calc input stderr: stderr: +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +input: Starting parse Entering state 0 Stack now 0 @@ -26722,11 +27142,28 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -syntax error: invalid character: '#' syntax error -input: - | 1//2 -./calc.at:1337: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1341: $PREPARSER ./calc input +stderr: +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26736,11 +27173,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: $PREPARSER ./calc input +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26750,7 +27190,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1336: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1336: cat stderr +./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1338: cat stderr +input: +input: + | (1 + 1) / (1 - 1) +./calc.at:1336: $PREPARSER ./calc input + | 1 2 +./calc.at:1341: $PREPARSER ./calc input +./calc.at:1337: cat stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -26760,25 +27211,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: cat stderr stderr: -1.3: syntax error input: +stderr: +error: null divisor +./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1338: $PREPARSER ./calc input +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: +stderr: | (#) + (#) = 2222 -./calc.at:1337: $PREPARSER ./calc input -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1340: cat stderr +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1336: cat stderr +error: null divisor +syntax error Starting parse Entering state 0 Stack now 0 @@ -26902,6 +27354,8 @@ Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: +stderr: Starting parse Entering state 0 Stack now 0 @@ -27023,18 +27477,24 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -input: - | 1//2 +./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (!!) + (1 2) = 1 ./calc.at:1340: $PREPARSER ./calc input - | (# + 1) = 1111 -./calc.at:1336: $PREPARSER ./calc input -stderr: -./calc.at:1338: cat stderr stderr: +./calc.at:1336: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 syntax error +error: 2222 != 1 ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27044,23 +27504,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: - | error -stderr: -./calc.at:1338: $PREPARSER ./calc input +./calc.at:1337: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 syntax error -./calc.at:1337: cat stderr -syntax error: invalid character: '#' +error: 2222 != 1 + | 1 + 2 * 3 + !- ++ +./calc.at:1338: $PREPARSER ./calc input stderr: -1.1: syntax error ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + #) = 1111 -./calc.at:1337: $PREPARSER ./calc input -stderr: +./calc.at:1337: cat stderr +./calc.at:1341: cat stderr +./calc.at:1336: cat stderr ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -27071,6 +27535,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +471. 
calc.at:1336: ok +input: +input: + | 1//2 + | (1 + #) = 1111 +./calc.at:1337: $PREPARSER ./calc input +./calc.at:1341: $PREPARSER ./calc input +stderr: +stderr: +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -27169,10 +27645,9 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1340: cat stderr ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error -./calc.at:1336: "$PERL" -pi -e 'use strict; +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27182,6 +27657,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + +stderr: +syntax error stderr: Starting parse Entering state 0 @@ -27281,7 +27759,16 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1338: "$PERL" -pi -e 'use strict; +input: + | (- *) + (1 2) = 1 +./calc.at:1340: $PREPARSER ./calc input +stderr: +syntax error +syntax error +error: 2222 != 1 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27292,9 +27779,7 @@ }eg ' expout || exit 77 stderr: -./calc.at:1340: cat stderr -stdout: -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27304,56 +27789,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1341: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +syntax error +syntax error +error: 2222 != 1 input: - | error -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1336: cat stderr -./calc.at:1338: cat stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1338: $PREPARSER ./calc input +476. calc.at:1342: testing Calculator %yacc ... +./calc.at:1342: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + ./calc.at:1337: cat stderr stderr: -./calc.at:1341: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c - -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -input: -./calc.at:1336: $PREPARSER ./calc input -input: +1.14: memory exhausted +./calc.at:1342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: cat stderr input: | (# + 1) = 1111 ./calc.at:1337: $PREPARSER ./calc input stderr: - | 1 = 2 = 3 -./calc.at:1338: $PREPARSER ./calc input -stderr: stderr: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1341: $PREPARSER ./calc input -syntax error Starting parse Entering state 0 Stack now 0 @@ -27450,15 +27911,24 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -1.7: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1340: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +1.14: memory exhausted + | error +./calc.at:1341: $PREPARSER ./calc input stderr: -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error -stderr: Starting parse Entering state 0 Stack now 0 @@ -27554,20 +28024,8 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1340: cat stderr stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -27578,6 +28036,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error +input: ./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -27588,12 +28048,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 2 -./calc.at:1341: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1340: $PREPARSER ./calc input ./calc.at:1338: cat stderr stderr: -./calc.at:1336: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1337: cat stderr + | (#) + (#) = 2222 +./calc.at:1338: $PREPARSER ./calc input +stderr: +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27603,27 +28071,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: cat stderr -./calc.at:1340: cat stderr syntax error -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -input: syntax error - | - | +1 -./calc.at:1338: $PREPARSER ./calc input +syntax error input: +stderr: | (1 + # + 1) = 1111 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' ./calc.at:1337: $PREPARSER ./calc input -stderr: -./calc.at:1336: cat stderr - | 1 = 2 = 3 -2.1: syntax error -stderr: -./calc.at:1340: $PREPARSER ./calc input ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -27740,24 +28198,9 @@ Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -2.1: syntax error - | (1 + 1) / (1 - 1) -./calc.at:1336: $PREPARSER ./calc input -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1341: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: Starting parse Entering state 0 @@ -27873,11 +28316,7 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -syntax error -error: null divisor -./calc.at:1336: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27887,8 +28326,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27898,9 +28336,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -error: null divisor -./calc.at:1341: cat stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; +input: +./calc.at:1342: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | 1 = 2 = 3 +./calc.at:1341: $PREPARSER ./calc input +./calc.at:1340: cat stderr +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27910,12 +28351,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr input: +stderr: +syntax error + | 1 + 2 * 3 + !+ ++ +./calc.at:1340: $PREPARSER ./calc input ./calc.at:1337: cat stderr - | 1//2 -./calc.at:1338: cat stderr -./calc.at:1341: $PREPARSER ./calc input -./calc.at:1336: "$PERL" -pi -e 'use strict; +stderr: +input: +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1338: $PREPARSER ./calc input +stderr: +input: +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -27926,16 +28379,11 @@ }eg ' expout || exit 77 stderr: -./calc.at:1338: $PREPARSER ./calc /dev/null -stderr: -syntax error -./calc.at:1340: cat stderr -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -input: +1.6: syntax error: invalid character: '#' ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + 1) / (1 - 1) ./calc.at:1337: $PREPARSER ./calc input +./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: stderr: Starting parse @@ -28079,16 +28527,8 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: ./calc.at:1337: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -1.1: syntax error -input: -./calc.at:1336: cat stderr - | - | +1 -./calc.at:1340: $PREPARSER ./calc input -471. 
calc.at:1336: ok +1.6: syntax error: invalid character: '#' stderr: Starting parse Entering state 0 @@ -28231,9 +28671,11 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -syntax error -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: cat stderr +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1340: $PREPARSER ./calc input +input: ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28244,8 +28686,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | + | +1 stderr: -./calc.at:1341: "$PERL" -pi -e 'use strict; +./calc.at:1341: $PREPARSER ./calc input +stderr: +./calc.at:1337: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28255,7 +28701,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1337: "$PERL" -pi -e 'use strict; +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +./calc.at:1338: cat stderr +./calc.at:1337: cat stderr +input: +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28265,12 +28720,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error - -./calc.at:1338: cat stderr -./calc.at:1337: cat stderr -input: -./calc.at:1340: "$PERL" -pi -e 'use strict; +472. calc.at:1337: ok + | (# + 1) = 1111 +./calc.at:1338: $PREPARSER ./calc input +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28280,42 +28733,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1341: cat stderr -./calc.at:1338: $PREPARSER ./calc input stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -472. calc.at:1337: ok +1.2: syntax error: invalid character: '#' ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -input: ./calc.at:1340: cat stderr - | error -./calc.at:1341: $PREPARSER ./calc input -476. calc.at:1342: testing Calculator %yacc ... 
-./calc.at:1342: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - +stderr: +./calc.at:1341: cat stderr +1.2: syntax error: invalid character: '#' -./calc.at:1340: $PREPARSER ./calc /dev/null +input: +./calc.at:1341: $PREPARSER ./calc /dev/null + | 1 + 2 * 3 + !* ++ +./calc.at:1340: $PREPARSER ./calc input stderr: +memory exhausted +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28326,15 +28759,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1342: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: syntax error +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stdout: stderr: syntax error +./types.at:139: $PREPARSER ./test +memory exhausted +stderr: ./calc.at:1338: cat stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +477. calc.at:1343: testing Calculator parse.error=detailed ... +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28344,8 +28782,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1341: "$PERL" -pi -e 'use strict; +./calc.at:1343: mv calc.y.tmp calc.y + +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28355,27 +28794,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (!!) + (1 2) = 1 +input: +======== Testing with C++ standard flags: '' +./calc.at:1343: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | (1 + # + 1) = 1111 ./calc.at:1338: $PREPARSER ./calc input stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 +1.6: syntax error: invalid character: '#' ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -477. calc.at:1343: testing Calculator parse.error=detailed ... 
+./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1340: cat stderr -./calc.at:1343: mv calc.y.tmp calc.y - +./calc.at:1341: cat stderr +stderr: +1.6: syntax error: invalid character: '#' stderr: stderr: -./calc.at:1343: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: stdout: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1341: cat stderr +stdout: + | (#) + (#) = 2222 ./types.at:139: $PREPARSER ./test -stderr: input: +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1342: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28386,46 +28830,65 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stdout: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1340: $PREPARSER ./calc input - | 1 = 2 = 3 ./calc.at:1341: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + ./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: syntax error syntax error syntax error syntax error error: 4444 != 1 -stderr: -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1338: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +input: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 syntax error syntax error syntax error syntax error error: 4444 != 1 -./calc.at:1338: cat stderr +./calc.at:1342: $PREPARSER ./calc input stderr: -syntax error ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -443. 
types.at:139: ok -./calc.at:1342: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (- *) + (1 2) = 1 + | (1 + 1) / (1 - 1) ./calc.at:1338: $PREPARSER ./calc input stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +stderr: +1.11-17: error: null divisor +./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr ./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -28448,11 +28911,15 @@ }eg ' expout || exit 77 stderr: - -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 ./calc.at:1343: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +1.11-17: error: null divisor +input: + | 1 2 +./calc.at:1342: $PREPARSER ./calc input +stderr: +./calc.at:1340: cat stderr +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1338: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28463,49 +28930,53 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr +stderr: +stderr: ./calc.at:1341: cat stderr -input: +syntax error +stdout: ./calc.at:1338: cat stderr input: - | (!!) + (1 2) = 1 +./types.at:139: ./check + | (1 + #) = 1111 +input: ./calc.at:1340: $PREPARSER ./calc input - | - | +1 -stderr: +473. calc.at:1338: ok +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | (!!) + (1 2) = 1 ./calc.at:1341: $PREPARSER ./calc input -input: +stderr: +syntax error: invalid character: '#' +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error error: 2222 != 1 -478. calc.at:1344: testing Calculator parse.error=verbose ... 
-./calc.at:1344: mv calc.y.tmp calc.y +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: -./calc.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error - | (* *) + (*) + (*) -./calc.at:1338: $PREPARSER ./calc input +syntax error: invalid character: '#' stderr: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1342: cat stderr stderr: +stdout: syntax error error: 2222 != 1 -stderr: -stdout: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./types.at:139: ./check -stderr: +input: ./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -syntax error -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1338: "$PERL" -pi -e 'use strict; + | 1//2 +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28515,7 +28986,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: "$PERL" -pi -e 'use strict; +stderr: +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28525,7 +29000,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1341: "$PERL" -pi -e 'use strict; +syntax error +478. calc.at:1344: testing Calculator parse.error=verbose ... 
+./calc.at:1340: cat stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1344: mv calc.y.tmp calc.y + +./calc.at:1341: cat stderr +./calc.at:1344: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +input: +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28535,39 +29019,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: cat stderr -./calc.at:1340: cat stderr -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1344: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | (# + 1) = 1111 +./calc.at:1340: $PREPARSER ./calc input input: | (- *) + (1 2) = 1 stderr: -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: cat stderr +./calc.at:1341: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error syntax error error: 2222 != 1 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: $PREPARSER ./calc /dev/null -stderr: -stderr: -./calc.at:1338: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1342: cat stderr stderr: +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -syntax error +syntax error: invalid character: '#' +stderr: syntax error syntax error error: 2222 != 1 -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: - | 1 + 2 * 3 + !- ++ -syntax error -./calc.at:1338: $PREPARSER ./calc input + | error +./calc.at:1342: $PREPARSER ./calc input stderr: ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -28579,8 +29054,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28591,8 +29066,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +syntax error ./calc.at:1340: cat stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; +input: +./calc.at:1344: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | (1 + # + 1) = 1111 +./calc.at:1341: cat stderr +./calc.at:1340: $PREPARSER ./calc input +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28602,50 +29084,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1341: cat stderr - | (* *) + (*) + (*) -./calc.at:1340: $PREPARSER ./calc input stderr: -syntax error -syntax error -syntax error +syntax error: invalid character: '#' ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: cat stderr input: + | (* *) + (*) + (*) +./calc.at:1341: $PREPARSER ./calc input stderr: stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1342: cat stderr +syntax error: invalid character: '#' syntax error syntax error syntax error -./calc.at:1341: 
$PREPARSER ./calc input -stdout: -./calc.at:1342: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: syntax error syntax error syntax error -syntax error -error: 4444 != 1 -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1338: $PREPARSER ./calc input -stderr: -1.14: memory exhausted -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - + | 1 = 2 = 3 +./calc.at:1342: $PREPARSER ./calc input ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28658,40 +29117,7 @@ ' expout || exit 77 stderr: syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -input: -stderr: -1.14: memory exhausted - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1342: $PREPARSER ./calc input -stderr: -./calc.at:1340: cat stderr ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28703,80 +29129,17 @@ }eg ' expout || exit 77 stderr: -input: -./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1340: $PREPARSER ./calc input -stderr: -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1338: cat stderr -./calc.at:1341: cat stderr -input: -./calc.at:1340: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | 1 2 -./calc.at:1342: $PREPARSER ./calc input -input: -input: - | (!!) + (1 2) = 1 -./calc.at:1341: $PREPARSER ./calc input -stderr: - | (#) + (#) = 2222 -./calc.at:1338: $PREPARSER ./calc input -stderr: syntax error -input: +./calc.at:1340: cat stderr stderr: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ +stdout: +./types.at:139: $PREPARSER ./test +input: +./calc.at:1341: cat stderr + | (1 + 1) / (1 - 1) ./calc.at:1340: $PREPARSER ./calc input -syntax error -error: 2222 != 1 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -error: 2222 != 1 -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' stderr: stderr: -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28787,53 +29150,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -./calc.at:1338: cat stderr -./calc.at:1342: cat stderr -input: input: - | 1 + 2 * 3 + !* ++ - | (1 + #) = 1111 -./calc.at:1338: $PREPARSER ./calc input -./calc.at:1340: $PREPARSER ./calc input -input: -stderr: - | 1//2 -stderr: -./calc.at:1342: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -memory exhausted +error: null divisor ./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1341: $PREPARSER ./calc input stderr: -./calc.at:1341: cat stderr -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -memory exhausted +======== Testing with C++ standard flags: '' stderr: -1.6: syntax error: invalid character: '#' +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1342: cat stderr +./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: -syntax error - | (- *) + (1 2) = 1 +input: + | 1 + 2 * 3 + !- ++ ./calc.at:1341: $PREPARSER ./calc input stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: + | + | +1 +./calc.at:1342: $PREPARSER ./calc input ./calc.at:1340: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28844,23 +29183,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: syntax error -error: 2222 != 1 -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1338: cat stderr ./calc.at:1340: cat stderr -./calc.at:1342: cat stderr -input: +474. calc.at:1340: ok ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28871,23 +29202,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -input: -./calc.at:1338: $PREPARSER ./calc input - | (#) + (#) = 2222 -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error: invalid character: '#' -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1341: cat stderr -input: -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -28897,47 +29212,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error -./calc.at:1342: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' stderr: -./calc.at:1338: cat stderr -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + +./calc.at:1341: cat stderr +./calc.at:1342: cat stderr +./calc.at:1342: $PREPARSER ./calc /dev/null input: - | (* *) + (*) + (*) + | 1 + 2 * 3 + !* ++ stderr: ./calc.at:1341: $PREPARSER ./calc input -input: syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (1 + # + 1) = 1111 -syntax error -syntax error -syntax error -./calc.at:1338: $PREPARSER ./calc input +memory exhausted ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error -syntax error -syntax error stderr: -1.6: syntax error: invalid character: '#' +memory exhausted +479. calc.at:1346: testing Calculator api.pure=full %locations ... +./calc.at:1346: mv calc.y.tmp calc.y + +./calc.at:1346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y ./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28948,19 +29247,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -input: -stderr: -./calc.at:1338: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -28971,15 +29257,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1342: cat stderr +./calc.at:1341: cat stderr +stderr: +input: stdout: -./calc.at:1340: $PREPARSER ./calc input +input: + | (#) + (#) = 2222 +./calc.at:1341: $PREPARSER ./calc input ./calc.at:1343: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1342: cat stderr stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1342: $PREPARSER ./calc input syntax error: invalid character: '#' -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +syntax error: invalid character: '#' +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1343: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -28991,15 +29285,17 @@ || /\t/ )' calc.c -./calc.at:1338: cat stderr +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1341: cat stderr stderr: - | 1 = 2 = 3 syntax error: invalid character: '#' -./calc.at:1342: $PREPARSER ./calc input +syntax error: invalid character: '#' input: -stdout: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -29013,73 +29309,16 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 ./calc.at:1343: $PREPARSER ./calc input stderr: -./calc.at:1344: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: -input: -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1338: $PREPARSER ./calc input -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1341: $PREPARSER ./calc input -stderr: -./calc.at:1344: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -stderr: -1.11-17: error: null divisor -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1338: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error stderr: ./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1341: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -1.11-17: error: null divisor - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1344: $PREPARSER ./calc input -input: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1341: $PREPARSER ./calc input - | 1 2 ./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29090,9 +29329,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1340: cat stderr -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1338: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29102,35 +29339,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1346: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1342: cat stderr +input: +./calc.at:1341: cat stderr + | 1 2 +./calc.at:1343: $PREPARSER ./calc input stderr: -stderr: -stderr: -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: syntax error, unexpected number +input: ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1342: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1341: $PREPARSER ./calc input stderr: -input: stderr: stderr: - | (# + 1) = 1111 +syntax error: invalid character: '#' +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error, unexpected number -./calc.at:1340: $PREPARSER ./calc input -./calc.at:1342: cat stderr +syntax error +error: 2222 != 1 +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1338: cat stderr -./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr syntax error: invalid character: '#' -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -473. 
calc.at:1338: ok stderr: -input: -syntax error: invalid character: '#' - | - | +1 -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1341: "$PERL" -pi -e 'use strict; +syntax error +error: 2222 != 1 +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29140,8 +29378,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29151,16 +29388,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: $PREPARSER ./calc input -stderr: -stderr: -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected number -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: cat stderr -stderr: -./calc.at:1340: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29170,60 +29398,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1343: cat stderr ./calc.at:1341: cat stderr - -syntax error -stderr: -syntax error, unexpected number input: -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1341: $PREPARSER ./calc input | 1//2 ./calc.at:1343: $PREPARSER ./calc input +input: + | (# + 1) = 1111 +./calc.at:1341: $PREPARSER ./calc input +./calc.at:1342: cat stderr stderr: stderr: -./calc.at:1340: cat stderr -memory exhausted -syntax error, unexpected '/', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '/', expecting number or '-' or '(' or '!' ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: +syntax error: invalid character: '#' + | (- *) + (1 2) = 1 +./calc.at:1342: $PREPARSER ./calc input stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 syntax error, unexpected '/', expecting number or '-' or '(' or '!' -memory exhausted -input: -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1340: $PREPARSER ./calc input -479. calc.at:1346: testing Calculator api.pure=full %locations ... 
stderr: -./calc.at:1346: mv calc.y.tmp calc.y - -syntax error: invalid character: '#' -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: cat stderr -./calc.at:1346: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1343: "$PERL" -pi -e 'use strict; +syntax error +syntax error +error: 2222 != 1 +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29234,7 +29436,7 @@ }eg ' expout || exit 77 stderr: -./calc.at:1341: "$PERL" -pi -e 'use strict; +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29244,21 +29446,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1342: cat stderr -input: - | 1//2 -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1342: $PREPARSER ./calc /dev/null -./calc.at:1343: cat stderr -stderr: -stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' syntax error +syntax error +error: 2222 != 1 +./calc.at:1343: cat stderr ./calc.at:1341: cat stderr -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1340: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29268,108 +29462,66 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stdout: input: -stderr: +./calc.at:1344: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' | error -syntax error, unexpected '/', expecting number or '-' or '(' or '!' ./calc.at:1343: $PREPARSER ./calc input -stderr: input: stderr: -syntax error - | (#) + (#) = 2222 + | (1 + # + 1) = 1111 ./calc.at:1341: $PREPARSER ./calc input +./calc.at:1344: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + syntax error, unexpected invalid token ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' +./calc.at:1342: cat stderr syntax error: invalid character: '#' ./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1340: cat stderr -syntax error, unexpected invalid token -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1346: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1344: cat stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1340: $PREPARSER ./calc input -stderr: input: - | error + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1344: $PREPARSER ./calc input -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -error: null divisor -./calc.at:1340: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: cat stderr -./calc.at:1343: cat stderr -stderr: stderr: syntax error, unexpected invalid token -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: null divisor -input: -./calc.at:1341: cat stderr -input: - | 1 = 2 = 3 -./calc.at:1343: $PREPARSER ./calc input stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: +syntax error: invalid character: '#' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) ./calc.at:1342: $PREPARSER ./calc input -syntax error, unexpected invalid token stderr: stderr: -syntax error, unexpected '=' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error +./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr syntax error syntax error syntax error -error: 4444 != 1 ./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1340: "$PERL" -pi -e 'use strict; + | 1 2 +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29379,45 +29531,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1341: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error, unexpected '=' +./calc.at:1344: $PREPARSER ./calc input stderr: syntax error syntax error syntax error -syntax error -error: 4444 != 1 -./calc.at:1344: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1340: cat stderr -./calc.at:1344: cat stderr -474. calc.at:1340: ok -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: +stderr: ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29428,6 +29547,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error, unexpected number +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: cat stderr +stderr: +syntax error, unexpected number +input: ./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29439,40 +29564,15 @@ }eg ' expout || exit 77 | 1 = 2 = 3 -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1343: cat stderr -stderr: -syntax error, unexpected '=' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1341: cat stderr -input: - -./calc.at:1342: cat stderr - | - | +1 ./calc.at:1343: $PREPARSER ./calc input stderr: -input: -stderr: +./calc.at:1341: cat stderr syntax error, unexpected '=' - | (# + 1) = 1111 -./calc.at:1341: $PREPARSER ./calc input -syntax error, unexpected '+' ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: -syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -stderr: -./calc.at:1342: $PREPARSER ./calc input -syntax error, unexpected '+' -stderr: + | (1 + 1) / (1 - 1) stderr: -syntax error -error: 2222 != 1 -syntax error: invalid character: '#' -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1341: $PREPARSER ./calc input ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29483,14 +29583,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error, unexpected '=' stderr: -480. calc.at:1347: testing Calculator api.push-pull=both api.pure=full %locations ... -syntax error -error: 2222 != 1 -./calc.at:1347: mv calc.y.tmp calc.y - +./calc.at:1342: cat stderr +error: null divisor +./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1344: cat stderr -./calc.at:1347: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +error: null divisor +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1342: $PREPARSER ./calc input +stderr: +input: +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29501,6 +29607,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1//2 +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1343: cat stderr +stderr: ./calc.at:1341: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29511,42 +29621,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: cat stderr +./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +stderr: +input: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+ | 1 + 2 * 3 + !- ++ | | +1 -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1343: $PREPARSER ./calc /dev/null +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1342: $PREPARSER ./calc input ./calc.at:1341: cat stderr -syntax error, unexpected '+' stderr: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected end of file -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error, unexpected '+' - | (1 + # + 1) = 1111 -./calc.at:1341: $PREPARSER ./calc input -stderr: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +475. calc.at:1341: ok stderr: -syntax error: invalid character: '#' -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected end of file -./calc.at:1342: cat stderr stderr: -syntax error: invalid character: '#' ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29557,9 +29653,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1347: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -input: -./calc.at:1343: "$PERL" -pi -e 'use strict; +syntax error, unexpected '+' +./calc.at:1344: cat stderr + +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29569,18 +29666,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1343: cat stderr -stderr: -./calc.at:1344: cat stderr -syntax error -syntax error -error: 2222 != 1 -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1341: "$PERL" -pi -e 'use strict; +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29590,46 +29676,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +./calc.at:1343: cat stderr input: -syntax error -syntax error -error: 2222 != 1 -syntax error, unexpected end of input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1344: $PREPARSER ./calc input stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 +./calc.at:1343: $PREPARSER ./calc /dev/null +./calc.at:1342: cat stderr +stderr: +syntax error, unexpected invalid token +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected end of file ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1341: cat stderr -syntax error, unexpected end of input +stdout: stderr: stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 +stderr: +input: stdout: + | 1 + 2 * 3 + !* ++ +./calc.at:1342: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test +syntax error, unexpected invalid token +480. calc.at:1347: testing Calculator api.push-pull=both api.pure=full %locations ... ./calc.at:1346: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: - | (1 + 1) / (1 - 1) +stderr: +syntax error, unexpected end of file +stderr: +./calc.at:1347: mv calc.y.tmp calc.y + +./calc.at:1347: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +memory exhausted +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1346: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -29641,9 +29720,7 @@ || /\t/ )' calc.c -./calc.at:1341: $PREPARSER ./calc input -./calc.at:1342: cat stderr -stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29654,8 +29731,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -error: null divisor -./calc.at:1341: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29666,12 +29741,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +======== Testing with C++ standard flags: '' ./calc.at:1343: cat stderr +memory exhausted +./calc.at:1344: cat stderr input: -input: -stderr: - | (* *) + (*) + (*) -./calc.at:1342: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -29685,52 +29760,27 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -error: null divisor -stderr: ./calc.at:1346: $PREPARSER ./calc input -syntax error -syntax error -syntax error -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: cat stderr input: stderr: - | (!!) 
+ (1 2) = 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1343: $PREPARSER ./calc input -stderr: -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -syntax error -syntax error, unexpected number -error: 2222 != 1 input: -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -stderr: -./calc.at:1341: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 ./calc.at:1344: $PREPARSER ./calc input -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: stderr: syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' error: 4444 != 1 +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '=' ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29741,18 +29791,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: stderr: -input: syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' error: 4444 != 1 +syntax error, unexpected '=' +./calc.at:1342: cat stderr +input: | 1 2 ./calc.at:1346: $PREPARSER ./calc input -./calc.at:1341: cat stderr stderr: -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1347: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +input: +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29764,10 +29819,7 @@ ' expout || exit 77 1.3: syntax error ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: cat stderr -475. 
calc.at:1341: ok -./calc.at:1343: cat stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29777,16 +29829,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.3: syntax error -input: - | 1 + 2 * 3 + !+ ++ + | (#) + (#) = 2222 ./calc.at:1342: $PREPARSER ./calc input -input: stderr: - | (- *) + (1 2) = 1 -./calc.at:1343: $PREPARSER ./calc input +stderr: ./calc.at:1344: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +1.3: syntax error +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: cat stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29797,28 +29852,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: input: -./calc.at:1346: cat stderr -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 - | (!!) + (1 2) = 1 -./calc.at:1342: $EGREP -c -v 'Return for a new token:|LAC:' stderr - + | + | +1 ./calc.at:1344: $PREPARSER ./calc input stderr: -syntax error, unexpected number -error: 2222 != 1 +syntax error, unexpected '+' +input: ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1346: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29828,36 +29873,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: +syntax error, unexpected '+' input: -input: - | 1 + 2 * 3 + !- ++ | 1//2 -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1343: cat stderr -stderr: syntax error, unexpected number error: 2222 != 1 -stderr: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1342: cat stderr stderr: 1.3: syntax error ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1343: $PREPARSER ./calc input stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected number +error: 2222 != 1 stderr: +input: 1.3: syntax error -stderr: -481. calc.at:1348: testing Calculator parse.error=detailed %locations ... 
-./calc.at:1348: mv calc.y.tmp calc.y - -stderr: + | (1 + #) = 1111 +./calc.at:1342: $PREPARSER ./calc input ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29868,9 +29904,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: +stderr: ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29881,18 +29916,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: cat stderr -./calc.at:1348: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stdout: ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29903,42 +29927,52 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1344: cat stderr +syntax error: invalid character: '#' +./types.at:139: ./check +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1343: cat stderr +./calc.at:1344: $PREPARSER ./calc /dev/null +stderr: +syntax error: invalid character: '#' +stderr: ./calc.at:1346: cat stderr +syntax error, unexpected end of input input: -./calc.at:1343: cat stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (- *) + (1 2) = 1 -./calc.at:1344: $PREPARSER ./calc input +./calc.at:1343: $PREPARSER ./calc input stderr: +stderr: +syntax error, unexpected end of input syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected number error: 2222 != 1 input: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1343: $PREPARSER ./calc input -input: -stderr: ./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | error ./calc.at:1346: $PREPARSER ./calc input -./calc.at:1342: cat stderr stderr: -stderr: -./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 1.1: syntax error -stderr: ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
syntax error, unexpected number error: 2222 != 1 stderr: -1.1: syntax error -input: -input: - | 1 + 2 * 3 + !* ++ - | 1 + 2 * 3 + !- ++ -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1346: "$PERL" -pi -e 'use strict; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29948,12 +29982,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: $PREPARSER ./calc input -stderr: -stderr: -memory exhausted -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; +1.1: syntax error +./calc.at:1342: cat stderr +./calc.at:1344: cat stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -29963,25 +29995,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: -stderr: -./calc.at:1344: cat stderr -./calc.at:1346: cat stderr -memory exhausted -./types.at:139: $PREPARSER ./test -input: -input: - | 1 = 2 = 3 -stderr: -./calc.at:1346: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1348: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -stderr: ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -29992,17 +30005,53 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +input: +input: + | (# + 1) = 1111 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1342: $PREPARSER ./calc input +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1343: cat stderr +stderr: +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1346: cat stderr ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) +./calc.at:1343: $PREPARSER ./calc input +stderr: +input: +stderr: +stderr: +syntax error: invalid character: '#' + | 1 = 2 = 3 +./calc.at:1346: $PREPARSER ./calc input +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+error: 4444 != 1 +stderr: 1.7: syntax error ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./calc.at:1343: cat stderr stderr: -./calc.at:1342: "$PERL" -pi -e 'use strict; +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +stderr: +1.7: syntax error +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30012,15 +30061,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.7: syntax error -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | 1 + 2 * 3 + !* ++ -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30030,11 +30071,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: $PREPARSER ./calc input -stderr: -memory exhausted -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30044,16 +30081,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -memory exhausted -./calc.at:1342: cat stderr -./calc.at:1346: cat stderr -./calc.at:1344: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1342: $PREPARSER ./calc input -input: -stderr: ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30064,35 +30091,49 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1344: cat stderr +input: +./calc.at:1343: cat stderr +./calc.at:1346: cat stderr +./calc.at:1342: cat stderr + | (!!) 
+ (1 2) = 1 +./calc.at:1344: $PREPARSER ./calc input +stderr: input: +input: +syntax error, unexpected number +error: 2222 != 1 | | +1 - | 1 + 2 * 3 + !+ ++ +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1346: $PREPARSER ./calc input -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1343: $PREPARSER ./calc input + | (1 + # + 1) = 1111 stderr: +./calc.at:1342: $PREPARSER ./calc input 2.1: syntax error +stderr: ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' +stderr: +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: syntax error: invalid character: '#' -input: +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test +stderr: stderr: - | (#) + (#) = 2222 -./calc.at:1343: $PREPARSER ./calc input 2.1: syntax error stderr: +./calc.at:1343: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr -syntax error: invalid character: '#' syntax error: invalid character: '#' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30103,11 +30144,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30117,14 +30154,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +======== Testing with C++ standard flags: '' +input: | 1 + 2 * 3 + !- ++ -./calc.at:1344: $PREPARSER ./calc input +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1344: cat stderr stderr: ./calc.at:1346: cat stderr -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1342: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1346: $PREPARSER ./calc /dev/null +input: +stderr: +1.1: syntax error +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + | (- *) + (1 2) = 1 +./calc.at:1344: $PREPARSER ./calc input stderr: -stdout: +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +./calc.at:1342: cat stderr ./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30135,18 +30199,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1346: $PREPARSER ./calc /dev/null stderr: -stdout: -./calc.at:1347: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1342: cat stderr -./types.at:139: ./check +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1346: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: ./calc.at:1343: cat stderr +stdout: +input: +./calc.at:1347: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1346: cat stderr + | (1 + 1) / (1 - 1) +input: +./calc.at:1342: $PREPARSER ./calc input stderr: -1.1: syntax error -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ +input: ./calc.at:1347: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -30158,10 +30236,10 @@ || /\t/ )' calc.c -stdout: -input: -stderr: -./types.at:139: $PREPARSER ./test +error: null divisor +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30172,11 +30250,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1342: $PREPARSER ./calc input -1.1: syntax error -input: +./calc.at:1346: $PREPARSER ./calc input +stderr: input: +memory exhausted +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +error: null divisor | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -30191,93 +30272,29 @@ | 2^2^3 = 256 | (2^2)^3 = 64 stderr: -./calc.at:1347: $PREPARSER ./calc input - | (1 + #) = 1111 -stderr: -syntax error: invalid character: '#' -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -./calc.at:1344: cat stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1346: cat stderr -stderr: -======== Testing with C++ standard flags: '' -stderr: -syntax error: invalid character: '#' -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -./calc.at:1344: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1346: $PREPARSER ./calc input -stderr: -stderr: -memory exhausted -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 1.2: syntax error 1.18: syntax error 1.23: syntax error 1.41: syntax error 1.1-46: error: 4444 != 1 -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - | 1 2 ./calc.at:1347: $PREPARSER ./calc input -stderr: +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: cat stderr memory exhausted stderr: stderr: +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error 1.18: syntax error 1.23: syntax error 1.41: syntax error 1.1-46: error: 4444 != 1 -1.3: syntax error -./calc.at:1343: cat stderr -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1342: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1344: $PREPARSER ./calc input stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1342: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30287,12 +30304,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -1.3: syntax error - | (# + 1) = 1111 -input: -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1344: "$PERL" -pi -e 'use strict; +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30302,13 +30317,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - | (# + 1) = 1111 -./calc.at:1342: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30318,30 +30329,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1346: cat stderr -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1344: cat stderr -syntax error: invalid character: '#' stderr: -syntax error: invalid character: '#' +stdout: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1343: cat stderr +input: +./types.at:139: ./check +./calc.at:1342: cat stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y + | 1 2 +./calc.at:1347: $PREPARSER ./calc input +./calc.at:1346: cat stderr +stderr: +476. calc.at:1342: ok +1.3: syntax error +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: input: -./calc.at:1347: cat stderr | (#) + (#) = 2222 -./calc.at:1344: $PREPARSER ./calc input -stderr: +./calc.at:1343: $PREPARSER ./calc input | (!!) + (1 2) = 1 -stdout: -./calc.at:1346: $PREPARSER ./calc input -stderr: -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test -./calc.at:1343: "$PERL" -pi -e 'use strict; +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30351,38 +30362,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1346: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.3: syntax error 1.11: syntax error 1.1-16: error: 2222 != 1 ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -./calc.at:1343: cat stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1//2 -./calc.at:1347: $PREPARSER ./calc input syntax error: invalid character: '#' syntax error: invalid character: '#' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: cat stderr stderr: + 1.11: syntax error 1.1-16: error: 2222 != 1 stderr: -1.3: syntax error -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -442. 
types.at:139: ok -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30392,18 +30389,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: - | (1 + # + 1) = 1111 -./calc.at:1342: cat stderr -./calc.at:1343: $PREPARSER ./calc input -stderr: -./types.at:139: ./check -stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -1.3: syntax error -stdout: -./calc.at:1344: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30413,17 +30403,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./types.at:139: $PREPARSER ./test -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1344: cat stderr -input: -./calc.at:1346: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1344: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: cat stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30433,93 +30418,47 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1342: $PREPARSER ./calc input +481. calc.at:1348: testing Calculator parse.error=detailed %locations ... +./calc.at:1348: mv calc.y.tmp calc.y -stdout: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1344: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1348: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1346: cat stderr input: -stderr: +./calc.at:1343: cat stderr + | 1//2 +./calc.at:1347: $PREPARSER ./calc input input: - | (1 + #) = 1111 -======== Testing with C++ standard flags: '' -syntax error: invalid character: '#' + | 1 + 2 * 3 + !- ++ ./calc.at:1344: $PREPARSER ./calc input +stderr: +input: +1.3: syntax error +stderr: | (- *) + (1 2) = 1 +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1346: $PREPARSER ./calc input stderr: -./calc.at:1348: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c - -syntax error: invalid character: '#' -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS ./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: cat stderr +input: stderr: -./calc.at:1343: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 1.4: syntax error 1.12: syntax error 1.1-17: error: 2222 != 1 + | (1 + #) = 1111 +./calc.at:1343: $PREPARSER ./calc input ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1348: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +1.3: syntax error stderr: -input: stderr: -482. calc.at:1350: testing Calculator parse.error=detailed %locations %header api.prefix={calc} %verbose %yacc ... -./calc.at:1350: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - syntax error: invalid character: '#' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: 1.4: syntax error 1.12: syntax error 1.1-17: error: 2222 != 1 - | error -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1343: cat stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30530,11 +30469,7 @@ }eg ' expout || exit 77 stderr: -1.1: syntax error -stderr: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error: invalid character: '#' ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30545,12 +30480,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1343: $PREPARSER ./calc input -./calc.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -stderr: -input: ./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30561,24 +30490,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1342: cat stderr -1.1: syntax error -error: null divisor -./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1348: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1346: cat stderr -input: -error: null divisor - | (1 + 1) / (1 - 1) -1.3: syntax error, unexpected number -./calc.at:1342: $PREPARSER ./calc input -./calc.at:1344: cat stderr -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1347: cat stderr +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30588,39 +30501,46 
@@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -error: null divisor -./calc.at:1342: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: cat stderr input: +./calc.at:1348: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1346: cat stderr + | error +./calc.at:1347: $PREPARSER ./calc input +./calc.at:1343: cat stderr stderr: - | (# + 1) = 1111 input: +1.1: syntax error +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ ./calc.at:1344: $PREPARSER ./calc input -stderr: -1.3: syntax error, unexpected number -stderr: +input: +input: | (* *) + (*) + (*) ./calc.at:1346: $PREPARSER ./calc input -error: null divisor -syntax error: invalid character: '#' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./calc.at:1347: cat stderr +stderr: + | (# + 1) = 1111 +memory exhausted +./calc.at:1343: $PREPARSER ./calc input +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error 1.10: syntax error 1.16: syntax error +1.1: syntax error +stderr: ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1343: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +stderr: +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30631,12 +30551,7 @@ }eg ' expout || exit 77 syntax error: invalid character: '#' -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1348: cat stderr -./calc.at:1342: "$PERL" -pi -e 'use strict; +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30646,13 +30561,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1343: cat stderr -input: -./calc.at:1350: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS - | 1 = 2 = 3 -stderr: -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1347: cat stderr +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30662,8 +30572,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1344: "$PERL" -pi -e 'use strict; +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30673,30 +30582,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: +./calc.at:1344: cat stderr input: -477. 
calc.at:1343: ok +./calc.at:1346: cat stderr + | 1 = 2 = 3 +./calc.at:1347: $PREPARSER ./calc input +stderr: +input: +./calc.at:1343: cat stderr +input: + | (#) + (#) = 2222 1.7: syntax error ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 +./calc.at:1344: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1346: $PREPARSER ./calc input stderr: -./types.at:139: ./check -./calc.at:1348: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stdout: -./calc.at:1342: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: -./calc.at:1344: cat stderr +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: cat stderr +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1343: $PREPARSER ./calc input 1.7: syntax error -./types.at:139: $PREPARSER ./test stderr: -476. calc.at:1342: ok +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +syntax error: invalid character: '#' +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30707,28 +30625,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: input: -input: - - | 1 + 2 * 3 + !+ ++ + | 1 + 2 * 3 + !- ++ ./calc.at:1346: $PREPARSER ./calc input stderr: - | (1 + # + 1) = 1111 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1344: $PREPARSER ./calc input -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -444. types.at:139: ok -./calc.at:1347: cat stderr -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: $EGREP -c -v 'Return for a new token:|LAC:' stderr - -stderr: syntax error: invalid character: '#' -input: -./calc.at:1348: "$PERL" -pi -e 'use strict; +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30738,33 +30642,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ -483. calc.at:1351: testing Calculator parse.error=detailed %locations %header %name-prefix "calc" api.token.prefix={TOK_} %verbose %yacc ... 
-./calc.at:1346: $PREPARSER ./calc input -./calc.at:1351: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -stderr: -input: +./calc.at:1347: cat stderr ./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: cat stderr +./calc.at:1344: cat stderr +input: +stderr: | | +1 ./calc.at:1347: $PREPARSER ./calc input -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -2.1: syntax error -stderr: -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -2.1: syntax error -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30774,8 +30660,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: input: -./calc.at:1344: "$PERL" -pi -e 'use strict; +2.1: syntax error +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30785,8 +30676,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: cat stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1343: cat stderr +stderr: +stderr: +2.1: syntax error +syntax error: invalid character: '#' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1346: cat stderr +input: +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30796,32 +30694,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error -./calc.at:1348: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1343: $PREPARSER ./calc input +stderr: stderr: -./calc.at:1351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -1.1: syntax error, unexpected invalid token -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + 1) / (1 - 1) +syntax error: invalid character: '#' + | 1 + 2 * 3 + !* ++ +./calc.at:1346: $PREPARSER ./calc input +error: null divisor +./calc.at:1343: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1344: $PREPARSER ./calc input -1.1: syntax error, unexpected invalid token -./calc.at:1346: cat stderr +1.14: memory exhausted +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1347: cat stderr -484. calc.at:1353: testing Calculator %debug ... -./calc.at:1353: mv calc.y.tmp calc.y - stderr: -./calc.at:1347: $PREPARSER ./calc /dev/null error: null divisor -./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -485. calc.at:1354: testing Calculator parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... 
stderr: -./calc.at:1353: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -1.1: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30831,27 +30721,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1354: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -input: -error: null divisor - | 1 + 2 * 3 + !* ++ -./calc.at:1346: $PREPARSER ./calc input +1.14: memory exhausted +./calc.at:1347: $PREPARSER ./calc /dev/null stderr: 1.1: syntax error -./calc.at:1348: cat stderr -stderr: -1.14: memory exhausted -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1344: cat stderr +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30861,7 +30737,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1344: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1343: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30871,15 +30748,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +1.1: syntax error input: -1.14: memory exhausted - | 1 = 2 = 3 -./calc.at:1348: $PREPARSER ./calc input -./calc.at:1344: cat stderr -./calc.at:1347: cat stderr + | (# + 1) = 1111 +./calc.at:1344: $PREPARSER ./calc input +./calc.at:1343: cat stderr stderr: -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30889,28 +30764,50 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.7: syntax error, unexpected '=' -478. calc.at:1344: ok -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +477. calc.at:1343: ok +./calc.at:1346: cat stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1347: cat stderr +input: + | (#) + (#) = 2222 + input: +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1344: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1347: $PREPARSER ./calc input stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1344: cat stderr stderr: -./calc.at:1346: cat stderr -1.7: syntax error, unexpected '=' -./calc.at:1353: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS 1.2: syntax error 1.18: syntax error 1.23: syntax error 1.41: syntax error 1.1-46: error: 4444 != 1 ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -stderr: - input: -./calc.at:1348: "$PERL" -pi -e 'use strict; + | (1 + # + 1) = 1111 +./calc.at:1344: $PREPARSER ./calc input +stderr: +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30920,18 +30817,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: 1.2: syntax error 1.18: syntax error 1.23: syntax error 1.41: syntax error 1.1-46: error: 4444 != 1 - | (#) + (#) = 2222 -./calc.at:1346: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: cat stderr +syntax error: invalid character: '#' +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -30943,17 +30836,8 @@ }eg ' expout || exit 77 stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: -./calc.at:1347: cat stderr -./calc.at:1354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS - | - | +1 -./calc.at:1348: $PREPARSER ./calc input -stderr: -486. calc.at:1355: testing Calculator parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... -./calc.at:1355: +482. calc.at:1350: testing Calculator parse.error=detailed %locations %header api.prefix={calc} %verbose %yacc ... +./calc.at:1350: if "$POSIXLY_CORRECT_IS_EXPORTED"; then sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y else @@ -30961,7 +30845,18 @@ fi -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1346: cat stderr +syntax error: invalid character: '#' +input: +./calc.at:1347: cat stderr + | (1 + #) = 1111 +./calc.at:1346: $PREPARSER ./calc input +./calc.at:1350: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30971,23 +30866,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -2.1: syntax error, unexpected '+' -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +stderr: | (!!) 
+ (1 2) = 1 ./calc.at:1347: $PREPARSER ./calc input -stderr: -2.1: syntax error, unexpected '+' +1.6: syntax error: invalid character: '#' +./calc.at:1344: cat stderr stderr: 1.11: syntax error 1.1-16: error: 2222 != 1 -./calc.at:1355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1346: cat stderr ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1348: "$PERL" -pi -e 'use strict; +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -30997,11 +30885,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: input: - | (1 + #) = 1111 -./calc.at:1346: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 + | (1 + 1) / (1 - 1) +./calc.at:1344: $PREPARSER ./calc input stderr: -1.6: syntax error: invalid character: '#' +error: null divisor +./calc.at:1346: cat stderr ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31012,16 +30904,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1348: cat stderr -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: $PREPARSER ./calc /dev/null +./calc.at:1344: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' +input: +error: null divisor + | (# + 1) = 1111 +./calc.at:1346: $PREPARSER ./calc input stderr: -1.1: syntax error, unexpected end of file -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' ./calc.at:1347: cat stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +1.2: syntax error: invalid character: '#' +./calc.at:1344: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31031,18 +30927,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.1: syntax error, unexpected end of file -input: | (- *) + (1 2) = 1 ./calc.at:1347: $PREPARSER ./calc input stderr: -./calc.at:1346: cat stderr 1.4: syntax error 1.12: syntax error 1.1-17: error: 2222 != 1 +./calc.at:1350: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1344: cat stderr ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31052,18 +30946,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1355: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: -input: 1.4: syntax error 1.12: syntax error 1.1-17: error: 2222 != 1 - | (# + 1) = 1111 -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1348: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +478. 
calc.at:1344: ok +./calc.at:1346: cat stderr ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31075,18 +30963,18 @@ }eg ' expout || exit 77 input: + | (1 + # + 1) = 1111 +./calc.at:1346: $PREPARSER ./calc input + stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1348: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1347: cat stderr +stderr: +1.6: syntax error: invalid character: '#' +input: + | (* *) + (*) + (*) +./calc.at:1347: $PREPARSER ./calc input ./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31098,19 +30986,25 @@ }eg ' expout || exit 77 stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -stdout: - | (* *) + (*) + (*) -./calc.at:1347: $PREPARSER ./calc input +1.2: syntax error +1.10: syntax error +1.16: syntax error ./calc.at:1346: cat stderr -./types.at:139: $PREPARSER ./test -./calc.at:1348: "$PERL" -pi -e 'use strict; +483. calc.at:1351: testing Calculator parse.error=detailed %locations %header %name-prefix "calc" api.token.prefix={TOK_} %verbose %yacc ... +./calc.at:1351: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31120,26 +31014,51 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +./calc.at:1351: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | (1 + 1) / (1 - 1) +./calc.at:1346: $PREPARSER ./calc input stderr: stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +1.11-17: error: null divisor +./calc.at:1348: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: cat stderr +stderr: +./calc.at:1348: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +1.11-17: error: null divisor +input: input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1348: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1347: $PREPARSER ./calc input stderr: - | (1 + # + 1) = 1111 -./calc.at:1346: $PREPARSER ./calc input -./calc.at:1348: cat stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1346: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31149,18 +31068,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1346: cat stderr input: - | (!!) + (1 2) = 1 +input: + | 1 2 + | 1 + 2 * 3 + !- ++ ./calc.at:1348: $PREPARSER ./calc input +./calc.at:1347: $PREPARSER ./calc input +479. calc.at:1346: ok stderr: stderr: -1.6: syntax error: invalid character: '#' -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +1.3: syntax error, unexpected number ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: cat stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.3: syntax error, unexpected number + +./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31170,11 +31101,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -input: - | 1 + 2 * 3 + !+ ++ ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31185,44 +31111,109 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1351: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1348: cat stderr +./calc.at:1347: cat stderr +input: + | 1 + 2 * 3 + !* ++ ./calc.at:1347: $PREPARSER ./calc input +input: stderr: -./calc.at:1346: cat stderr + | 1//2 +./calc.at:1348: $PREPARSER ./calc input +1.14: memory exhausted ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: cat stderr stderr: +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +484. calc.at:1353: testing Calculator %debug ... +1.14: memory exhausted +stderr: +./calc.at:1353: mv calc.y.tmp calc.y + +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1353: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1348: cat stderr +./calc.at:1347: cat stderr input: -./calc.at:1347: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (1 + 1) / (1 - 1) -./calc.at:1346: $PREPARSER ./calc input + | error input: -stderr: - | (- *) + (1 2) = 1 ./calc.at:1348: $PREPARSER ./calc input -1.11-17: error: null divisor -./calc.at:1346: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1347: $PREPARSER ./calc input stderr: stderr: -stdout: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 +1.1: syntax error, unexpected invalid token ./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.1: syntax error, unexpected invalid token +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1348: cat stderr +./calc.at:1347: cat stderr +input: + | 1 = 2 = 3 input: +./calc.at:1348: $PREPARSER ./calc input stderr: -1.11-17: error: null divisor - | 1 + 2 * 3 + !- ++ +./calc.at:1353: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | (1 + #) = 1111 ./calc.at:1347: $PREPARSER ./calc input +1.7: syntax error, unexpected '=' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1346: "$PERL" -pi -e 'use strict; +stderr: +1.7: syntax error, unexpected '=' +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -31232,8 +31223,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -======== Testing with C++ standard flags: '' +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1348: cat stderr +input: + | + | +1 +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1347: cat stderr +stderr: +2.1: syntax error, unexpected '+' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +2.1: syntax error, unexpected '+' + | (# + 1) = 1111 +./calc.at:1347: $PREPARSER ./calc input stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31244,15 +31260,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1346: cat stderr stderr: +1.2: syntax error: invalid character: '#' ./calc.at:1348: cat stderr -stdout: -stderr: -479. calc.at:1346: ok -./types.at:139: ./check -stdout: ./calc.at:1347: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31263,12 +31273,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: - | (* *) + (*) + (*) -./calc.at:1348: $PREPARSER ./calc input +./calc.at:1348: $PREPARSER ./calc /dev/null +stderr: +1.1: syntax error, unexpected end of file +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1347: cat stderr stderr: +1.1: syntax error, unexpected end of file +stdout: +./calc.at:1350: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1350: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -31280,17 +31294,24 @@ || /\t/ )' calc.c calc.h -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: cat stderr -stdout: +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: + | (1 + # + 1) = 1111 +./calc.at:1347: $PREPARSER ./calc input stderr: - -./calc.at:1353: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1348: cat stderr +1.6: syntax error: invalid character: '#' +input: +./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -31305,26 +31326,42 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1350: $PREPARSER ./calc input -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -input: stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1347: $PREPARSER ./calc input ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1353: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1348: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +stderr: +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 + | 1 2 +./calc.at:1350: $PREPARSER ./calc input +stderr: ./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -31335,14 +31372,106 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.14: memory exhausted +./calc.at:1347: cat stderr +1.3: syntax error, unexpected number +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1348: cat stderr +1.3: syntax error, unexpected number +input: + | (1 + 1) / (1 - 1) +./calc.at:1347: $PREPARSER ./calc input +input: + | (!!) + (1 2) = 1 +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +1.11-17: error: null divisor ./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1350: cat stderr +1.11-17: error: null divisor +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +input: +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1347: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 +./calc.at:1350: $PREPARSER ./calc input stderr: -./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: ./calc.at:1348: cat stderr +stdout: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1353: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +input: +./calc.at:1347: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1353: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1350: cat stderr +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +480. calc.at:1347: ok input: -1.14: memory exhausted | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -31356,21 +31485,15 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1353: $PREPARSER ./calc input -input: stderr: - | 1 2 +input: +./calc.at:1353: $PREPARSER ./calc input + | error +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 ./calc.at:1350: $PREPARSER ./calc input -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 @@ -32388,25 +32511,21 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -487. calc.at:1357: testing Calculator api.pure=full parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... -./calc.at:1357: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - stderr: - | 1 + 2 * 3 + !+ ++ -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1348: $PREPARSER ./calc input -stderr: -1.3: syntax error, unexpected number +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error, unexpected invalid token ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: stderr: Starting parse @@ -33427,18 +33546,10 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -1.3: syntax error, unexpected number -./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stdout: -stderr: -./calc.at:1347: cat stderr -./types.at:139: ./check -stdout: -./calc.at:1351: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +1.1: syntax error, unexpected invalid token +./calc.at:1348: cat stderr input: + | 1 2 ./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -33449,27 +33560,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - | 1 2 ./calc.at:1353: $PREPARSER ./calc input input: -input: - | (#) + (#) = 2222 -./calc.at:1347: $PREPARSER ./calc input stderr: -./calc.at:1351: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - - | 1 + 2 * 3 + !- ++ -./calc.at:1348: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -33490,16 +33583,16 @@ Stack now 0 Cleanup: discarding lookahead token "number" (1.1: 2) Stack now 0 + | (* *) + (*) + (*) ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1348: $PREPARSER ./calc input ./calc.at:1350: cat stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: +485. calc.at:1354: testing Calculator parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Starting parse Entering state 0 Stack now 0 @@ -33520,44 +33613,27 @@ Stack now 0 Cleanup: discarding lookahead token "number" (1.1: 2) Stack now 0 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1351: $PREPARSER ./calc input -stderr: -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1//2 +./calc.at:1354: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + + | 1 = 2 = 3 ./calc.at:1350: $PREPARSER ./calc input -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.7: syntax error, unexpected '=' ./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: ./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -33578,15 +33654,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1347: cat stderr -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+1.7: syntax error, unexpected '=' ./calc.at:1348: cat stderr -input: -input: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - | (1 + #) = 1111 -./calc.at:1347: $PREPARSER ./calc input ./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -33597,45 +33666,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -./calc.at:1351: $PREPARSER ./calc input -stderr: -./calc.at:1357: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS ./calc.at:1353: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error, unexpected number -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: + | 1 + 2 * 3 + !+ ++ ./calc.at:1350: cat stderr - | 1 + 2 * 3 + !* ++ ./calc.at:1348: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -stderr: input: stderr: -1.14: memory exhausted -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1//2 -1.3: syntax error, unexpected number -./calc.at:1353: $PREPARSER ./calc input -input: - | error -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1347: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stdout: stderr: +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1351: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -33665,21 +33710,29 @@ Stack now 0 stderr: ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.14: memory exhausted -1.1: syntax error, unexpected invalid token -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +stderr: +./calc.at:1350: $PREPARSER ./calc input +stdout: +./calc.at:1351: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c calc.h + +./calc.at:1348: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: +./types.at:139: $PREPARSER ./test +stderr: +2.1: syntax error, unexpected '+' stderr: +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -33707,10 +33760,33 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.1: ) Stack now 0 -./calc.at:1347: cat stderr -./calc.at:1351: cat stderr -1.1: syntax error, unexpected invalid token -./calc.at:1348: "$PERL" -pi -e 'use strict; +stderr: +2.1: syntax error, unexpected '+' +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1351: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !- ++ +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +stderr: +======== Testing with C++ standard flags: '' +stderr: +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -33720,14 +33796,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: - | 1//2 - | (# + 1) = 1111 -./calc.at:1351: $PREPARSER ./calc input -./calc.at:1348: cat stderr -./calc.at:1347: $PREPARSER ./calc input -./calc.at:1350: "$PERL" -pi -e 'use strict; +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1350: cat stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -33737,8 +33811,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: + | 1 2 +./calc.at:1350: $PREPARSER ./calc /dev/null +./calc.at:1351: $PREPARSER ./calc input stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; +stderr: +1.1: syntax error, unexpected end of file +1.3: syntax error, unexpected number +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -33748,28 +33832,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1353: cat stderr stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1350: cat stderr -stderr: - | (#) + (#) = 2222 -stderr: -./calc.at:1348: $PREPARSER ./calc input -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-1.2: syntax error: invalid character: '#' stderr: -./calc.at:1353: cat stderr +1.3: syntax error, unexpected number +1.1: syntax error, unexpected end of file +./calc.at:1348: cat stderr input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | 1 = 2 = 3 -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1347: "$PERL" -pi -e 'use strict; + | error +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -33779,7 +33851,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1351: "$PERL" -pi -e 'use strict; +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -33790,16 +33862,6 @@ }eg ' expout || exit 77 stderr: -input: -stderr: -1.7: syntax error, unexpected '=' - | error -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1353: $PREPARSER ./calc input -stderr: -./calc.at:1351: cat stderr Starting parse Entering state 0 Stack now 0 @@ -33810,20 +33872,8 @@ Stack now 0 ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1347: cat stderr -1.7: syntax error, unexpected '=' -input: -stderr: -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1351: cat stderr +./calc.at:1350: cat stderr input: Starting parse Entering state 0 @@ -33833,12 +33883,36 @@ syntax error Cleanup: discarding lookahead token "invalid token" (1.1: ) Stack now 0 - | (1 + # + 1) = 1111 - | error -./calc.at:1347: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1348: $PREPARSER ./calc input +input: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +1.14: memory exhausted +./calc.at:1350: $PREPARSER ./calc input +stderr: +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1//2 ./calc.at:1351: $PREPARSER ./calc input stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; +stderr: +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -33848,17 +33922,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1348: cat stderr -1.1: syntax error, unexpected invalid token -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1350: cat stderr -1.6: syntax error: invalid character: '#' -input: -./calc.at:1353: "$PERL" -pi -e 'use strict; +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -33868,12 +33934,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - | (1 + #) = 1111 -./calc.at:1348: $PREPARSER ./calc input -1.1: syntax error, unexpected invalid token -stderr: -./calc.at:1347: "$PERL" -pi -e 'use strict; +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -33883,13 +33944,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -1.6: syntax error: invalid character: '#' - | - | +1 -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1353: cat stderr +./calc.at:1350: cat stderr ./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -33900,40 +33956,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -2.1: syntax error, unexpected '+' +./calc.at:1348: cat stderr stderr: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./calc.at:1353: cat stderr -stderr: -./types.at:139: $PREPARSER ./test ./calc.at:1351: cat stderr -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1347: cat stderr -stderr: -2.1: syntax error, unexpected '+' input: +./types.at:139: ./check +input: + | (!!) + (1 2) = 1 +./calc.at:1350: $PREPARSER ./calc input +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y | 1 = 2 = 3 ./calc.at:1353: $PREPARSER ./calc input input: -input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) - | 1 = 2 = 3 -./calc.at:1351: $PREPARSER ./calc input -./calc.at:1347: $PREPARSER ./calc input stderr: +stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -33973,27 +34011,23 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.1: ) Stack now 0 -stderr: ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 + | error +./calc.at:1351: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: $PREPARSER ./calc input stderr: -======== Testing with C++ standard flags: '' -./calc.at:1348: cat stderr -1.11-17: error: null divisor -1.7: syntax error, unexpected '=' +stderr: +1.1: syntax error, unexpected invalid token ./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: -./calc.at:1350: cat stderr Starting parse Entering state 0 Stack now 0 @@ -34033,22 +34067,14 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.1: ) Stack now 0 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stderr: -input: -1.11-17: error: null divisor -./calc.at:1350: $PREPARSER ./calc /dev/null -1.7: syntax error, unexpected '=' - | (# + 1) = 1111 -./calc.at:1348: $PREPARSER ./calc input +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 stderr: stderr: -1.1: syntax error, unexpected end of file 1.2: syntax error: invalid character: '#' -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1347: "$PERL" -pi -e 'use strict; +1.8: syntax error: invalid character: '#' +1.1: syntax error, unexpected invalid token +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -34058,7 +34084,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1351: "$PERL" -pi -e 'use strict; +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -34068,7 +34094,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: "$PERL" -pi -e 'use strict; +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -34078,13 +34104,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.1: syntax error, unexpected end of file -./calc.at:1351: cat stderr -./calc.at:1347: cat stderr -./calc.at:1348: "$PERL" -pi -e 'use strict; +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -34094,50 +34114,49 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1348: cat stderr +stderr: +./calc.at:1350: cat stderr +stdout: +./calc.at:1351: cat stderr +./types.at:139: $PREPARSER ./test +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: +stderr: + | (1 + #) = 1111 + | (- *) + (1 2) = 1 +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1350: $PREPARSER ./calc input +stderr: ./calc.at:1353: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps 
&& $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: input: - | - | +1 +1.6: syntax error: invalid character: '#' + | 1 = 2 = 3 +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 ./calc.at:1351: $PREPARSER ./calc input -480. calc.at:1347: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1350: cat stderr -2.1: syntax error, unexpected '+' +1.7: syntax error, unexpected '=' +input: +======== Testing with C++ standard flags: '' ./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1348: cat stderr -input: -stdout: +stderr: +1.6: syntax error: invalid character: '#' +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 | | +1 -./calc.at:1354: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1353: $PREPARSER ./calc input stderr: -2.1: syntax error, unexpected '+' stderr: -input: -input: -./calc.at:1354: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - Starting parse Entering state 0 Stack now 0 @@ -34163,11 +34182,11 @@ Stack now 0 Cleanup: discarding lookahead token '+' (1.1: ) Stack now 0 - | (1 + # + 1) = 1111 +1.7: syntax error, unexpected '=' ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1351: "$PERL" -pi -e 'use strict; +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +./calc.at:1348: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -34177,21 +34196,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: $PREPARSER ./calc input -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1351: cat stderr -stderr: -stderr: - -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 +./calc.at:1348: cat stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $PREPARSER ./test +stderr: Starting parse Entering state 0 Stack now 0 @@ -34217,1060 +34235,1980 @@ Stack now 0 Cleanup: discarding lookahead token '+' (1.1: ) Stack now 0 +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1351: cat stderr input: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: $PREPARSER ./calc /dev/null - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1354: $PREPARSER ./calc input -./types.at:139: ./check + | +1 +./calc.at:1351: $PREPARSER ./calc input +input: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1350: cat stderr + | (# + 1) = 1111 +./calc.at:1348: $PREPARSER ./calc input +2.1: syntax error, unexpected '+' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: stderr: +1.2: syntax error: invalid character: '#' +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +2.1: syntax error, unexpected '+' +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (* *) + (*) + (*) +./calc.at:1350: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1353: cat stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: $PREPARSER ./calc /dev/null +./calc.at:1351: cat stderr +stderr: +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1351: $PREPARSER ./calc /dev/null +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y 1.1: syntax error, unexpected end of file +stderr: ./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.1: syntax error, unexpected end of file +input: +./calc.at:1350: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1348: $PREPARSER ./calc input +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +stdout: +stderr: +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.6: syntax error: invalid character: '#' + | 1 + 2 * 3 + !+ ++ +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +./calc.at:1350: $PREPARSER ./calc input +stderr: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: cat stderr +./calc.at:1351: cat stderr +stderr: 1.6: syntax error: invalid character: '#' +stderr: +input: +./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: +stderr: +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stdout: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1351: $PREPARSER ./calc input +./types.at:139: ./check +./calc.at:1348: cat stderr +stderr: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1350: $PREPARSER ./calc input stderr: 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
1.1-46: error: 4444 != 1 -stderr: -./calc.at:1355: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 3) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack 
now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) Entering state 22 -Stack now 0 8 21 30 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '*' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 
+Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 2) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +input: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1348: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 6 8 21 +Stack now 0 8 21 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" 
(1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 3) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 
-Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 137): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token '*' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = 
token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.1: 2) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +stderr: +stderr: +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11-17: error: null divisor +./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +1.11-17: error: null divisor +./types.at:139: $PREPARSER ./test +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1351: cat stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1348: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1350: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1353: cat stderr +======== Testing with C++ standard flags: '' +./calc.at:1348: cat stderr +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS + | 1 + 2 * 3 + !* ++ +./calc.at:1350: $PREPARSER ./calc input +481. calc.at:1348: ok +stderr: +input: +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 + | (!!) + (1 2) = 1 +./calc.at:1353: $PREPARSER ./calc input +1.14: memory exhausted +stderr: +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 +Next token is token '!' (1.1: ) +Shifting token '!' 
(1.1: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 137): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 
28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +1.14: memory exhausted + +./calc.at:1351: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' 
(1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) 
Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (- *) + (1 2) = 1 +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1350: cat stderr +stderr: +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +486. calc.at:1355: testing Calculator parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... +./calc.at:1355: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + + | (#) + (#) = 2222 +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1353: cat stderr +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | (- *) + (1 2) = 1 +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 +Stack now 0 8 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 
119): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1351: cat stderr +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +input: +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./types.at:139: ./check + | (* *) + (*) + (*) +./calc.at:1351: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 6 8 20 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 +Stack now 0 8 21 4 12 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (10.11: ) 
-Reducing stack by rule 8 (line 119): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '=' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Stack now 0 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 
-Stack now 0 6 +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1350: cat stderr +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +input: + | (1 + #) = 1111 +./calc.at:1350: $PREPARSER ./calc input +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.6: syntax error: invalid character: '#' +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1351: cat stderr +./calc.at:1353: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1355: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +input: +input: +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !+ ++ +./calc.at:1351: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1353: $PREPARSER ./calc input +stderr: +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 137): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 137): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) 
--> $$ = nterm exp (12.1-5: 256) +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./calc.at:1350: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 137): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 137): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) 
+Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 8 21 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 3333) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token end of file (14.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +input: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1351: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1350: $PREPARSER ./calc input stderr: -1.1: syntax error, unexpected end of file +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -35281,7 +36219,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; +stderr: +1.2: syntax error: invalid character: '#' +stderr: +./calc.at:1353: cat stderr +./calc.at:1350: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -35291,7 +36233,216 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1355: "$PERL" -ne ' +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1350: cat stderr +./calc.at:1351: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1353: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' 
(1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +input: +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1350: $PREPARSER ./calc input +input: +stderr: + | 1 + 2 * 3 + !* ++ +./calc.at:1351: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +1.14: memory exhausted +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.14: memory exhausted +stderr: +stderr: +stdout: +1.6: syntax error: invalid character: '#' +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +input: +./calc.at:1354: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -35302,8 +36453,202 @@ || /\t/ )' calc.c calc.h + | 1 + 2 * 3 + !- ++ +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1351: cat stderr stderr: -488. calc.at:1358: testing Calculator api.push-pull=both api.pure=full parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' 
(1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1354: $PREPARSER ./calc input +input: +./calc.at:1350: cat stderr + | (#) + (#) = 2222 +stderr: +./calc.at:1351: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) Starting parse Entering state 0 Stack now 0 @@ -36321,57 +37666,16 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1348: cat stderr -input: -./calc.at:1358: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1353: cat stderr stderr: -./calc.at:1350: cat stderr -input: -./calc.at:1353: $PREPARSER ./calc /dev/null +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1350: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -37389,33 +38693,73 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: cat stderr +./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: +1.11-17: error: null divisor +./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +input: +1.11-17: error: null divisor +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1 2 - | (1 + 1) / (1 - 1) +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1350: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1354: $PREPARSER ./calc input -./calc.at:1358: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1348: $PREPARSER ./calc input -input: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) Stack now 0 -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: -stderr: - | (!!) 
+ (1 2) = 1 -./calc.at:1350: $PREPARSER ./calc input +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1351: cat stderr +./calc.at:1350: cat stderr +./calc.at:1353: cat stderr stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1351: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -37430,1219 +38774,131 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +482. calc.at:1350: ok +input: +input: + | 1 + 2 * 3 + !* ++ + | (1 + #) = 1111 +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '+' (1.1: ) +Reducing stack by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): +Next token is token '+' (1.1: ) +Reducing stack by rule 7 (line 98): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = 
nterm exp (1.1: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: cat stderr +stderr: + +stderr: +1.6: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (2.1: 1) - 
$2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 137): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (4.9: 1) --> $$ = nterm exp 
(4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 137): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: 
) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) 
-Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 136): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 20 4 -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 20 4 12 -Reading a token 
-Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Reading a token -Next token is token number (12.5: 3) -Shifting token number 
(12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 137): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 137): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 137): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 137): - $1 = 
nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (14.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -stderr: -./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -1.11-17: error: null divisor -./calc.at:1348: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 -1.11-17: error: null divisor -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -input: -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 2 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1348: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1351: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1348: cat stderr -./calc.at:1353: cat stderr -./calc.at:1354: cat stderr -./calc.at:1350: cat stderr -input: -stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1353: $PREPARSER ./calc input -481. 
calc.at:1348: ok -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 - | (!!) + (1 2) = 1 -stderr: -./calc.at:1351: $PREPARSER ./calc input -input: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Stack now 0 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token @@ -38651,519 +38907,45 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) + $1 = token "number" (1.1: 2) -> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 3) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 
26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token Next token is token '*' (1.1: ) Shifting token '*' (1.1: ) Entering state 22 -Stack now 0 8 21 4 12 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) Entering state 31 -Stack now 0 8 21 4 12 22 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '*' (1.1: ) +Next token is token '+' (1.1: ) Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 1) + $1 = nterm exp (1.1: 2) $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) 
-Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 2) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - | (- *) + (1 2) = 1 -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: $PREPARSER ./calc input - | 1//2 -./calc.at:1354: $PREPARSER ./calc input -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -stderr: -stderr: -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -stderr: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token Next token is token '+' (1.1: ) Reducing stack by rule 7 (line 98): $1 = nterm exp (1.1: 1) $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 3) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a 
token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) Entering state 8 Stack now 0 8 Next token is token '+' (1.1: ) @@ -39171,134 +38953,26 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token Next token is token '*' (1.1: ) Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 1) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 124): + $1 = token '!' 
(1.1: ) $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.1: 2) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1355: "$PERL" -pi -e 'use strict; +memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +input: + | 1//2 +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -39309,18 +38983,6 @@ }eg ' expout || exit 77 stderr: -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1358: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -stderr: -stderr: -stdout: - -./calc.at:1355: cat stderr Starting parse Entering state 0 Stack now 0 @@ -39348,6 +39010,7 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -39358,48 +39021,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: $PREPARSER ./test -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: cat stderr - | 1//2 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1353: cat stderr ./calc.at:1351: cat stderr stderr: -======== Testing with C++ standard flags: '' -./calc.at:1354: cat stderr Starting parse Entering state 0 Stack now 0 @@ -39428,14 +39051,39 @@ Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 input: -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1353: $PREPARSER ./calc input -input: +487. calc.at:1357: testing Calculator api.pure=full parse.error=detailed %debug %locations %header %name-prefix "calc" %verbose %yacc ... + | (# + 1) = 1111 +./calc.at:1351: $PREPARSER ./calc input +./calc.at:1357: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +./calc.at:1353: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: input: - | (- *) + (1 2) = 1 -./calc.at:1351: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' + | (#) + (#) = 2222 +./calc.at:1353: $PREPARSER ./calc input +./calc.at:1354: cat stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -39445,18 +39093,13 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' (1.1: ) +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) Stack now 0 4 Shifting token error (1.1: ) Entering state 11 @@ -39484,25 +39127,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 +syntax error: invalid character: '#' Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) Error: popping token error (1.1: ) Stack now 0 8 21 4 Shifting token error (1.1: ) @@ -39534,13 +39164,13 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token @@ -39548,8 +39178,7 @@ Reducing stack by rule 6 (line 82): $1 = nterm exp (1.1: 2222) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 + $3 = nterm exp (1.1: 2222) -> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 @@ -39577,52 +39206,21 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | (* *) + (*) + (*) -stderr: -stderr: -./calc.at:1350: $PREPARSER ./calc input +./calc.at:1351: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +./calc.at:1351: cat stderr stderr: -489. 
calc.at:1360: testing Calculator api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1360: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 + | error +./calc.at:1354: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -39632,18 +39230,13 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' (1.1: ) +syntax error: invalid character: '#' +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) Stack now 0 4 Shifting token error (1.1: ) Entering state 11 @@ -39671,25 +39264,12 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 +syntax error: invalid character: '#' Shifting token error (1.1: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) Error: popping token error (1.1: ) Stack now 0 8 21 4 Shifting token error (1.1: ) @@ -39721,13 +39301,13 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token @@ -39735,8 +39315,7 @@ Reducing stack by rule 6 (line 82): $1 = nterm exp (1.1: 2222) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 + $3 = nterm exp (1.1: 2222) -> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 @@ -39763,15 +39342,8 @@ Stack now 0 6 17 Cleanup: popping token "end of 
input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | error -./calc.at:1354: $PREPARSER ./calc input stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Stack now 0 @@ -39781,36 +39353,12 @@ Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1351: $PREPARSER ./calc input stderr: stderr: -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.6: syntax error: invalid character: '#' +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -39819,8 +39367,7 @@ 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -./calc.at:1355: cat stderr -./calc.at:1351: "$PERL" -pi -e 'use strict; +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -39830,8 +39377,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: cat stderr -./calc.at:1350: "$PERL" -pi -e 'use strict; +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -39841,7 +39389,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1354: "$PERL" -pi -e 'use strict; +./calc.at:1354: cat stderr +./calc.at:1353: cat stderr +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -39851,31 +39401,58 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1357: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS input: - | error ./calc.at:1351: cat stderr -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1354: cat stderr -./calc.at:1350: cat stderr -stderr: + | 1 = 2 = 3 input: - | (- *) + (1 2) = 1 +./calc.at:1354: $PREPARSER ./calc input + | (1 + #) = 1111 ./calc.at:1353: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (* *) + (*) + (*) -input: -./calc.at:1351: $PREPARSER ./calc input input: + | (1 + 1) / (1 - 1) Starting parse Entering state 0 Stack now 0 @@ -39885,120 +39462,70 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = 
token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 4 12 Reading a token -Next token is token "number" (1.1: 2) -syntax error +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) Error: popping token error (1.1: ) -Stack now 0 8 21 4 +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 +Reading a token Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 1111) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -40006,7 +39533,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -40024,33 +39551,9 
@@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1350: $PREPARSER ./calc input -stderr: - | 1 = 2 = 3 -./calc.at:1354: $PREPARSER ./calc input -stderr: -stderr: -stdout: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./types.at:139: ./check -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1351: $PREPARSER ./calc input stderr: -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -40090,6 +39593,10 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 +stderr: +1.11-17: error: null divisor +./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -40099,120 +39606,70 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 4 12 Reading a token -Next token is token "number" (1.1: 2) -syntax error +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 Error: popping nterm exp (1.1: 1) -Stack now 0 8 21 4 +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.1: 2) -Error: discarding token 
"number" (1.1: 2) +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) Error: popping token error (1.1: ) -Stack now 0 8 21 4 +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 +Reading a token Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 1111) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -40220,7 +39677,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -40238,12 +39695,7 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40254,60 +39706,9 @@ }eg ' expout || exit 77 stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1350: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1360: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1355: cat stderr -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: "$PERL" -pi -e 'use strict; +1.11-17: error: null divisor +./calc.at:1354: cat stderr +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40317,7 +39718,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: "$PERL" -pi -e 'use strict; +./calc.at:1351: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40328,69 +39729,39 @@ }eg ' expout || exit 77 input: - | 1 + 2 * 3 + !- ++ -./calc.at:1350: $PREPARSER ./calc input -input: -./calc.at:1354: cat stderr + | + | +1 +./calc.at:1354: $PREPARSER ./calc input ./calc.at:1351: cat stderr stderr: -./calc.at:1353: cat stderr - | 1 = 2 = 3 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line 
(1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -input: - | - | +1 -input: -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (* *) + (*) + (*) -./calc.at:1353: $PREPARSER ./calc input -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1351: $PREPARSER ./calc input +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +483. calc.at:1351: ok +./calc.at:1353: cat stderr stderr: Starting parse Entering state 0 @@ -40417,8 +39788,21 @@ Stack now 0 Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1353: $PREPARSER ./calc input + stderr: +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -40428,123 +39812,67 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.1: ) -syntax error +syntax error: invalid character: '#' Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) Error: popping token error (1.1: ) Stack now 0 4 Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) Error: popping token error (1.1: ) Stack now 0 4 Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) Error: popping token error (1.1: ) -Stack now 0 8 21 4 +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 119): $1 = token 
'(' (1.1: ) $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) -> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 +Entering state 28 +Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -40552,7 +39880,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -40572,84 +39900,7 @@ Cleanup: popping nterm input (1.1: ) ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -stderr: +./calc.at:1354: cat stderr Starting parse Entering state 0 Stack now 0 @@ -40659,123 +39910,67 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.1: ) -syntax error +syntax error: invalid character: '#' Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) Error: popping token error (1.1: ) Stack now 0 4 Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) Error: popping token error (1.1: ) Stack now 0 4 Shifting token error (1.1: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) Error: popping token error (1.1: ) -Stack now 0 8 21 4 +Stack now 0 4 Shifting token error (1.1: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 
4 11 Reading a token Next token is token ')' (1.1: ) Shifting token ')' (1.1: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 119): $1 = token '(' (1.1: ) $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) -> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 +Entering state 28 +Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Next token is token '\n' (1.1: ) @@ -40783,7 +39978,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -40801,86 +39996,8 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -stderr: -./calc.at:1350: cat stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: cat stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: -./calc.at:1354: cat stderr -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1351: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -./calc.at:1350: $PREPARSER ./calc input - | - | +1 -./calc.at:1355: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1354: $PREPARSER ./calc /dev/null -1.14: memory exhausted -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1353: cat stderr Starting parse Entering state 0 Stack now 0 @@ -40889,60 +40006,18 @@ 1.1: syntax error, unexpected end of file Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 +488. calc.at:1358: testing Calculator api.push-pull=both api.pure=full parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc ... ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -stderr: -stderr: -stderr: -1.14: memory exhausted -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1353: $PREPARSER ./calc input -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1355: "$PERL" -pi -e 'use strict; +./calc.at:1353: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -40952,6 +40027,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1358: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -40962,211 +40046,131 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1353: cat stderr +./calc.at:1354: cat stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1353: $PREPARSER ./calc input +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1354: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 4 12 21 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' 
(1.1: ) -Entering state 5 -Stack now 0 8 21 5 +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: cat stderr -stderr: -./calc.at:1355: cat stderr -./calc.at:1350: cat stderr -./calc.at:1354: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Stack now 0 8 -Next 
token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1353: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stdout: -./calc.at:1355: $PREPARSER ./calc /dev/null -input: -input: -input: -./calc.at:1357: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -stderr: - | 1 + 2 * 3 + !* ++ - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1351: $PREPARSER ./calc input -./calc.at:1354: $PREPARSER ./calc input - | (#) + (#) = 2222 -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 Reading a token Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.14: memory exhausted +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -41485,38 +40489,122 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - - | 1 + 2 * 3 + !- ++ -./calc.at:1353: $PREPARSER ./calc input stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +syntax error: invalid character: '#' +Error: popping token '+' (1.1: ) +Stack now 0 4 12 +Error: popping nterm exp (1.1: 1) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.1: ) +Error: discarding token "invalid token" (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Error: popping token error (1.1: ) +Stack now 0 4 +Shifting token error (1.1: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -stderr: -stderr: +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) Starting parse Entering state 0 Stack now 0 @@ -41834,317 +40922,587 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1354: cat stderr +./calc.at:1353: cat stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1354: $PREPARSER ./calc input +input: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1353: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) +Shifting token '+' (1.1: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.1: ) Reducing stack by rule 7 (line 98): $1 = nterm exp (1.1: 1) $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) Entering state 8 Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token Next token is token '-' (1.1: ) Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' 
(1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) ./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.14: memory exhausted -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: $PREPARSER ./calc input stderr: -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 4 12 21 1 Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 4 12 21 30 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.1: ) +Next token is token ')' (1.1: ) Reducing stack by rule 7 (line 98): $1 = nterm exp (1.1: 1) $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) Entering state 8 Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token Next token is token '-' (1.1: ) Shifting token '-' (1.1: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 123): - $1 = token '!' 
(1.1: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack by rule 8 (line 99): + $1 = nterm exp (1.1: 1) $2 = token '-' (1.1: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1354: cat stderr -./calc.at:1350: cat stderr + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp 
(1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -42154,31 +41512,504 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1353: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1354: $PREPARSER ./calc input +./calc.at:1353: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 140): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +484. calc.at:1353: ok +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 140): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 
8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stdout: +./calc.at:1355: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' + +./calc.at:1355: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +input: +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1354: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' 
(1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) Entering state 22 @@ -42200,14 +42031,14 @@ Stack now 0 6 8 21 30 22 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Stack now 0 6 8 21 30 22 31 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -42215,7 +42046,7 @@ Entering state 30 Stack now 0 6 8 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -42243,7 +42074,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -42321,7 +42152,7 @@ Stack now 0 6 2 10 24 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -42329,7 +42160,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -42356,7 +42187,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -42408,7 +42239,7 @@ Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -42418,7 +42249,7 @@ Shifting token ')' (5.4: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 138): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -42442,7 +42273,7 @@ Stack now 0 6 8 24 33 Reading a token Next token is 
token '=' (5.8: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -42531,21 +42362,21 @@ Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -42572,7 +42403,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -42645,7 +42476,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -42668,7 +42499,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -42696,7 +42527,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -42773,7 +42604,7 @@ Stack now 0 6 8 20 4 12 20 29 Reading a token Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -42784,7 +42615,7 @@ Shifting token ')' (10.11: ) Entering state 27 Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 138): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -42793,7 +42624,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -42897,7 +42728,7 @@ Stack now 0 6 8 24 33 24 33 Reading a token Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -42905,7 +42736,7 @@ Entering state 33 Stack now 0 6 8 24 33 Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -42983,7 +42814,7 @@ Stack now 0 6 4 12 24 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -42994,7 +42825,7 @@ Shifting token ')' (13.5: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 
(line 138): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -43018,7 +42849,7 @@ Stack now 0 6 8 24 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -43072,12 +42903,11 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: cat stderr -stderr: -input: -./calc.at:1355: cat stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | (* *) + (*) + (*) +./calc.at:1354: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -43123,7 +42953,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -43131,7 +42961,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -43223,14 +43053,14 @@ Stack now 0 6 8 21 30 22 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Stack now 0 6 8 21 30 22 31 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -43238,7 +43068,7 @@ Entering state 30 Stack now 0 6 8 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -43266,7 +43096,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -43344,7 +43174,7 @@ Stack now 0 6 2 10 24 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -43352,7 +43182,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -43379,7 +43209,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -43431,7 +43261,7 @@ Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -43441,7 +43271,7 @@ Shifting token ')' (5.4: ) Entering state 27 Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 138): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' 
(5.4: ) @@ -43465,7 +43295,7 @@ Stack now 0 6 8 24 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -43554,21 +43384,21 @@ Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -43595,7 +43425,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -43668,7 +43498,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -43691,7 +43521,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -43719,7 +43549,7 @@ Stack now 0 6 8 19 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 124): +Reducing stack by rule 11 (line 136): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -43796,7 +43626,7 @@ Stack now 0 6 8 20 4 12 20 29 Reading a token Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -43807,7 +43637,7 @@ Shifting token ')' (10.11: ) Entering state 27 Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 138): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -43816,7 +43646,7 @@ Stack now 0 6 8 20 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 113): +Reducing stack by rule 8 (line 119): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -43920,7 +43750,7 @@ Stack now 0 6 8 24 33 24 33 Reading a token Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -43928,7 +43758,7 @@ Entering state 33 Stack now 0 6 8 24 33 Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -44006,7 +43836,7 @@ Stack now 0 6 4 12 24 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -44017,7 +43847,7 @@ Shifting token ')' (13.5: ) Entering state 27 Stack 
now 0 6 4 12 27 -Reducing stack by rule 13 (line 126): +Reducing stack by rule 13 (line 138): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -44041,7 +43871,7 @@ Stack now 0 6 8 24 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 125): +Reducing stack by rule 12 (line 137): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -44095,27 +43925,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) - | (!!) + (1 2) = 1 -./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1354: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1350: $PREPARSER ./calc input -stderr: -input: +./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' - | (#) + (#) = 2222 -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -44125,121 +43936,132 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp 
(1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -44256,21 +44078,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1351: $PREPARSER ./calc input -input: ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1355: $PREPARSER ./calc input -input: -stderr: -stderr: stderr: - | 1 2 -./calc.at:1357: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -44280,298 +44089,132 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 21 4 -Shifting token error (1.23-27: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 
-Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) Stack now 0 8 21 4 -Shifting token error (1.33-41: ) +Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.44: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = 
token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -44588,149 +44231,52 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: +489. calc.at:1360: testing Calculator api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1360: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + + | 1 2 +./calc.at:1355: $PREPARSER ./calc input stderr: -./calc.at:1353: cat stderr -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number 
(1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1354: cat stderr stderr: Starting parse Entering state 0 @@ -44752,14 +44298,11 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1354: $PREPARSER ./calc input stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -44773,319 +44316,365 @@ Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.13: ) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1355: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 8 21 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.20: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 142): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +input: + | 1//2 +./calc.at:1355: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1354: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 -Stack now 0 8 21 4 12 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 22 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 8 21 4 12 22 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '*' (1.39: ) +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Stack now 0 @@ -45100,15 +44689,20 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 - | 1 + 2 * 3 + !* ++ -./calc.at:1353: $PREPARSER ./calc input -./calc.at:1354: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -45118,8 +44712,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1351: "$PERL" -pi -e 'use strict; +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -45129,380 +44722,275 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1355: cat stderr +./calc.at:1354: cat stderr +stderr: +./calc.at:1360: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1354: $PREPARSER ./calc input +stdout: +./types.at:139: $PREPARSER ./test +input: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' 
(1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) Entering state 15 Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted +Reducing stack by rule 19 (line 144): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1350: cat stderr -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | error +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1351: cat stderr -./calc.at:1354: cat stderr +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.1: ) -Reducing stack by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.1: ) -Reducing stack by rule 7 (line 98): +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) Entering state 15 Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted +Reducing stack by rule 19 (line 144): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -input: +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +======== Testing with C++ standard flags: '' +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1354: cat stderr ./calc.at:1355: cat stderr -./calc.at:1357: cat stderr - | (# + 1) = 1111 -./calc.at:1350: $PREPARSER ./calc input input: - | (1 + #) = 1111 input: -stderr: -./calc.at:1351: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 + | 1 = 2 = 3 + | (#) + (#) = 2222 +./calc.at:1355: $PREPARSER ./calc input ./calc.at:1354: $PREPARSER ./calc input stderr: -input: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token 
'\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1//2 - | (!!) + (1 2) = 1 -./calc.at:1357: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1355: $PREPARSER ./calc input -stderr: -stderr: +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -45513,111 +45001,92 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token 
error (1.8: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 @@ -45644,7 +45113,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -45659,182 +45130,31 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> 
$$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -45844,111 +45164,92 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 141): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.8: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 139): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = 
token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 @@ -45975,7 +45276,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -45985,35 +45286,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1353: cat stderr -./calc.at:1351: "$PERL" -pi -e 'use strict; +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -46023,177 +45296,47 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1350: cat stderr +./calc.at:1355: cat stderr ./calc.at:1354: cat stderr -./calc.at:1351: cat stderr input: -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | + | +1 +stderr: +./calc.at:1355: $PREPARSER ./calc input input: - | (1 + # + 1) = 1111 -./calc.at:1350: $PREPARSER ./calc input -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (#) + (#) = 2222 stderr: -./calc.at:1353: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1354: $PREPARSER ./calc input +stdout: stderr: -input: -./calc.at:1357: cat stderr - | (# + 1) = 1111 -./calc.at:1351: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering 
state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -input: -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: cat stderr - | (* *) + (*) + (*) -stderr: -./calc.at:1354: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.6: syntax error: invalid character: '#' -input: +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./types.at:139: ./check +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y Starting parse Entering state 0 Stack now 0 @@ -46203,253 +45346,79 @@ Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.1: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -syntax error: invalid character: '#' 
-Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 8 21 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -46466,35 +45435,35 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | error -./calc.at:1357: $PREPARSER ./calc input ./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: -1.2: syntax error: invalid character: '#' -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 stderr: -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 @@ -46504,132 +45473,79 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -46646,8 +45562,19 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1351: "$PERL" -pi -e 'use strict; +stdout: +./types.at:139: $PREPARSER ./test +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -46657,6 +45584,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1354: cat stderr +./calc.at:1355: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: $PREPARSER ./calc /dev/null +======== Testing with C++ standard flags: '' +input: +stderr: + | (# + 1) = 1111 +./calc.at:1354: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS Starting parse Entering state 0 Stack now 0 @@ -46666,129 +45615,76 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = 
nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -46805,38 +45701,19 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1351: cat stderr -./calc.at:1350: cat stderr +./calc.at:1357: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' stderr: Starting parse Entering state 0 @@ -46847,129 +45724,76 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 140): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next 
token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -46986,7 +45810,18 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: "$PERL" -pi -e 'use strict; +./calc.at:1357: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -46996,13 +45831,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1354: cat stderr -input: - | (1 + # + 1) = 1111 -input: -./calc.at:1351: $PREPARSER ./calc input +./calc.at:1355: cat stderr input: -./calc.at:1355: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -47012,17 +45857,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1353: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1350: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1354: $PREPARSER ./calc input -stderr: -./calc.at:1357: cat stderr -stderr: +input: stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1354: cat stderr Starting parse Entering state 0 Stack now 0 @@ -47067,1851 +45906,181 @@ Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): +Next token is 
token '=' (1.11: ) +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -./calc.at:1350: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: cat stderr -input: -stderr: -stderr: -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 -Stack now 0 1 +Stack now 0 6 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 6 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 22 -Stack now 0 8 21 30 22 +Stack now 0 6 8 21 30 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) Entering state 1 -Stack now 0 8 21 30 22 1 
+Stack now 0 6 8 21 30 22 2 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (1 + #) = 1111 -./calc.at:1354: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1353: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -1.11-17: error: null divisor - | 1 = 2 = 3 -./calc.at:1357: $PREPARSER ./calc input -input: -stderr: - | (* *) + (*) + (*) -stderr: -./calc.at:1355: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 
-Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1354: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1350: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) 
-Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1350: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1351: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -482. calc.at:1350: ok -input: -./calc.at:1357: cat stderr -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1351: $PREPARSER ./calc input -stderr: -./calc.at:1355: cat stderr -1.11-17: error: null divisor -input: -./calc.at:1351: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: cat stderr - | - | +1 -./calc.at:1357: $PREPARSER ./calc input -stderr: -stderr: -stderr: -./calc.at:1353: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -input: -1.11-17: error: null divisor - -./calc.at:1358: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1355: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -input: -input: -stderr: - | (# + 1) = 1111 -./calc.at:1353: $PREPARSER ./calc input -./calc.at:1358: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" 
- if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1351: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | 1 + 2 * 3 + !* ++ -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 142): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1358: $PREPARSER ./calc input -stderr: -./calc.at:1351: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -input: -./calc.at:1357: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -483. 
calc.at:1351: ok - | 1 + 2 * 3 + !- ++ -./calc.at:1355: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1357: $PREPARSER ./calc /dev/null -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Return for a new token: -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 -Return for a new token: -Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Return for a 
new token: -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Return for a new token: -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Return for a new token: -Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Return for a new token: -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Return for a new token: -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Return for a new token: -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 101): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Return for a new token: -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 124): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 92): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) Entering state 3 @@ -48927,13 +46096,11 @@ -> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 -Return for a new token: Reading a token Next token is token number (4.2: 1) Shifting token number (4.2: 1) @@ -48944,13 +46111,11 @@ -> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 -Return for a new token: Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) Entering state 24 Stack now 0 6 2 10 24 -Return for a new token: Reading a token Next token is token number (4.4: 2) Shifting token number (4.4: 2) @@ 
-48961,7 +46126,6 @@ -> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 -Return for a new token: Reading a token Next token is token '=' (4.6: ) Reducing stack by rule 12 (line 125): @@ -48982,13 +46146,11 @@ Shifting token '=' (4.6: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (4.9: 1) Shifting token number (4.9: 1) @@ -48999,7 +46161,6 @@ -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 11 (line 124): @@ -49032,19 +46193,16 @@ -> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 -Return for a new token: Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 -Return for a new token: Reading a token Next token is token number (5.3: 1) Shifting token number (5.3: 1) @@ -49055,7 +46213,6 @@ -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 -Return for a new token: Reading a token Next token is token ')' (5.4: ) Reducing stack by rule 11 (line 124): @@ -49075,13 +46232,11 @@ -> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token number (5.6: 2) Shifting token number (5.6: 2) @@ -49092,7 +46247,6 @@ -> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 -Return for a new token: Reading a token Next token is token '=' (5.8: ) Reducing stack by rule 12 (line 125): @@ -49106,7 +46260,6 @@ Shifting token '=' (5.8: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (5.10: 1) Shifting token number (5.10: 1) @@ -49117,7 +46270,6 @@ -> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): @@ -49143,7 +46295,6 @@ -> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) @@ -49160,25 +46311,21 @@ -> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 -Return for a new token: Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 -Return for a new token: Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 -Return for a new token: Reading a token Next token is token number (7.4: 1) Shifting token number (7.4: 1) @@ -49189,7 +46336,6 @@ -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 -Return for a new token: Reading a token Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 124): @@ -49216,13 +46362,11 @@ Shifting token '=' (7.6: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) 
Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (7.9: 1) Shifting token number (7.9: 1) @@ -49233,7 +46377,6 @@ -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 11 (line 124): @@ -49266,7 +46409,6 @@ -> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) @@ -49283,7 +46425,6 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (9.1: 1) Shifting token number (9.1: 1) @@ -49294,13 +46435,11 @@ -> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 -Return for a new token: Reading a token Next token is token number (9.5: 2) Shifting token number (9.5: 2) @@ -49311,7 +46450,6 @@ -> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '-' (9.7: ) Reducing stack by rule 8 (line 113): @@ -49325,7 +46463,6 @@ Shifting token '-' (9.7: ) Entering state 20 Stack now 0 6 8 20 -Return for a new token: Reading a token Next token is token number (9.9: 3) Shifting token number (9.9: 3) @@ -49336,7 +46473,6 @@ -> $$ = nterm exp (9.9: 3) Entering state 29 Stack now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '=' (9.11: ) Reducing stack by rule 8 (line 113): @@ -49350,13 +46486,11 @@ Shifting token '=' (9.11: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (9.14: 4) Shifting token number (9.14: 4) @@ -49367,7 +46501,6 @@ -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 11 (line 124): @@ -49400,7 +46533,6 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (10.1: 1) Shifting token number (10.1: 1) @@ -49411,19 +46543,16 @@ -> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) Entering state 20 Stack now 0 6 8 20 -Return for a new token: Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 20 4 -Return for a new token: Reading a token Next token is token number (10.6: 2) Shifting token number (10.6: 2) @@ -49434,13 +46563,11 @@ -> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 -Return for a new token: Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) Entering state 20 Stack now 0 6 8 20 4 12 20 -Return for a new token: Reading a token Next token is token number (10.10: 3) Shifting token number (10.10: 3) @@ -49451,7 +46578,6 @@ -> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 -Return for a new token: Reading a token Next token is token ')' (10.11: ) Reducing stack by rule 8 (line 113): @@ -49472,7 +46598,6 @@ -> $$ = nterm exp (10.5-11: -1) Entering state 29 Stack 
now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '=' (10.13: ) Reducing stack by rule 8 (line 113): @@ -49486,7 +46611,6 @@ Shifting token '=' (10.13: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (10.15: 2) Shifting token number (10.15: 2) @@ -49497,7 +46621,6 @@ -> $$ = nterm exp (10.15: 2) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): @@ -49523,7 +46646,6 @@ -> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) @@ -49540,7 +46662,6 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (12.1: 2) Shifting token number (12.1: 2) @@ -49551,13 +46672,11 @@ -> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token number (12.3: 2) Shifting token number (12.3: 2) @@ -49568,13 +46687,11 @@ -> $$ = nterm exp (12.3: 2) Entering state 33 Stack now 0 6 8 24 33 -Return for a new token: Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) Entering state 24 Stack now 0 6 8 24 33 24 -Return for a new token: Reading a token Next token is token number (12.5: 3) Shifting token number (12.5: 3) @@ -49585,7 +46702,6 @@ -> $$ = nterm exp (12.5: 3) Entering state 33 Stack now 0 6 8 24 33 24 33 -Return for a new token: Reading a token Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 125): @@ -49607,7 +46723,6 @@ Shifting token '=' (12.7: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (12.9-11: 256) Shifting token number (12.9-11: 256) @@ -49618,7 +46733,6 @@ -> $$ = nterm exp (12.9-11: 256) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 102): @@ -49644,13 +46758,11 @@ -> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 -Return for a new token: Reading a token Next token is token number (13.2: 2) Shifting token number (13.2: 2) @@ -49661,13 +46773,11 @@ -> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 -Return for a new token: Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) Entering state 24 Stack now 0 6 4 12 24 -Return for a new token: Reading a token Next token is token number (13.4: 2) Shifting token number (13.4: 2) @@ -49678,7 +46788,6 @@ -> $$ = nterm exp (13.4: 2) Entering state 33 Stack now 0 6 4 12 24 33 -Return for a new token: Reading a token Next token is token ')' (13.5: ) Reducing stack by rule 12 (line 125): @@ -49699,13 +46808,11 @@ -> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token number (13.7: 3) Shifting token number (13.7: 3) @@ -49716,7 +46823,6 @@ -> $$ = nterm exp (13.7: 3) Entering state 33 
Stack now 0 6 8 24 33 -Return for a new token: Reading a token Next token is token '=' (13.9: ) Reducing stack by rule 12 (line 125): @@ -49730,7 +46836,6 @@ Shifting token '=' (13.9: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (13.11-12: 64) Shifting token number (13.11-12: 64) @@ -49741,7 +46846,6 @@ -> $$ = nterm exp (13.11-12: 64) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): @@ -49767,7 +46871,6 @@ -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (14.1: ) @@ -49776,129 +46879,330 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -490. calc.at:1362: testing Calculator %no-lines api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1362: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 8 21 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) Entering state 22 -Stack now 0 8 21 30 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 8 21 30 22 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) Entering state 31 -Stack now 0 8 21 30 22 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token 
'+' (1.11: ) +Next token is token '*' (1.39: ) Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: cat stderr +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + # + 1) = 1111 stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1354: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -49913,13 +47217,11 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -49930,13 +47232,11 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 -Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -49947,7 +47247,6 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 -Return for a new token: Reading a token Next token is token '=' (1.11: ) Reducing stack by rule 9 (line 114): @@ -49969,7 +47268,6 @@ Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.13: 7) Shifting token number (1.13: 7) @@ -49980,7 +47278,6 @@ -> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): @@ -50005,7 +47302,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (2.1: 1) Shifting token number (2.1: 1) @@ -50016,13 +47312,11 @@ -> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) Entering state 21 Stack now 0 6 8 21 -Return for a new token: Reading a 
token Next token is token number (2.5: 2) Shifting token number (2.5: 2) @@ -50033,19 +47327,16 @@ -> $$ = nterm exp (2.5: 2) Entering state 30 Stack now 0 6 8 21 30 -Return for a new token: Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) Entering state 22 Stack now 0 6 8 21 30 22 -Return for a new token: Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 21 30 22 2 -Return for a new token: Reading a token Next token is token number (2.10: 3) Shifting token number (2.10: 3) @@ -50056,7 +47347,6 @@ -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 21 30 22 2 10 -Return for a new token: Reading a token Next token is token '=' (2.12: ) Reducing stack by rule 11 (line 124): @@ -50085,13 +47375,11 @@ Shifting token '=' (2.12: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (2.15: 5) Shifting token number (2.15: 5) @@ -50102,7 +47390,6 @@ -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 11 (line 124): @@ -50135,7 +47422,6 @@ -> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) @@ -50152,13 +47438,11 @@ -> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 -Return for a new token: Reading a token Next token is token number (4.2: 1) Shifting token number (4.2: 1) @@ -50169,13 +47453,11 @@ -> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 -Return for a new token: Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) Entering state 24 Stack now 0 6 2 10 24 -Return for a new token: Reading a token Next token is token number (4.4: 2) Shifting token number (4.4: 2) @@ -50186,7 +47468,6 @@ -> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 -Return for a new token: Reading a token Next token is token '=' (4.6: ) Reducing stack by rule 12 (line 125): @@ -50207,13 +47488,11 @@ Shifting token '=' (4.6: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (4.9: 1) Shifting token number (4.9: 1) @@ -50224,7 +47503,6 @@ -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 11 (line 124): @@ -50257,19 +47535,16 @@ -> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 -Return for a new token: Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 -Return for a new token: Reading a token Next token is token number (5.3: 1) Shifting token number (5.3: 1) @@ -50280,7 +47555,6 @@ -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 -Return for a new token: Reading a token Next token is 
token ')' (5.4: ) Reducing stack by rule 11 (line 124): @@ -50300,13 +47574,11 @@ -> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token number (5.6: 2) Shifting token number (5.6: 2) @@ -50317,7 +47589,6 @@ -> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 -Return for a new token: Reading a token Next token is token '=' (5.8: ) Reducing stack by rule 12 (line 125): @@ -50331,7 +47602,6 @@ Shifting token '=' (5.8: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (5.10: 1) Shifting token number (5.10: 1) @@ -50342,7 +47612,6 @@ -> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): @@ -50368,7 +47637,6 @@ -> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) @@ -50385,25 +47653,21 @@ -> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 -Return for a new token: Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 -Return for a new token: Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 -Return for a new token: Reading a token Next token is token number (7.4: 1) Shifting token number (7.4: 1) @@ -50414,7 +47678,6 @@ -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 -Return for a new token: Reading a token Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 124): @@ -50441,13 +47704,11 @@ Shifting token '=' (7.6: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (7.9: 1) Shifting token number (7.9: 1) @@ -50458,7 +47719,6 @@ -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 11 (line 124): @@ -50491,7 +47751,6 @@ -> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) @@ -50508,7 +47767,6 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (9.1: 1) Shifting token number (9.1: 1) @@ -50519,13 +47777,11 @@ -> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 -Return for a new token: Reading a token Next token is token number (9.5: 2) Shifting token number (9.5: 2) @@ -50536,7 +47792,6 @@ -> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '-' (9.7: ) Reducing stack by rule 8 (line 113): @@ -50550,7 +47805,6 @@ Shifting token '-' (9.7: ) Entering state 20 Stack now 0 6 8 
20 -Return for a new token: Reading a token Next token is token number (9.9: 3) Shifting token number (9.9: 3) @@ -50561,7 +47815,6 @@ -> $$ = nterm exp (9.9: 3) Entering state 29 Stack now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '=' (9.11: ) Reducing stack by rule 8 (line 113): @@ -50575,13 +47828,11 @@ Shifting token '=' (9.11: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 19 2 -Return for a new token: Reading a token Next token is token number (9.14: 4) Shifting token number (9.14: 4) @@ -50592,7 +47843,6 @@ -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 -Return for a new token: Reading a token Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 11 (line 124): @@ -50625,7 +47875,6 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (10.1: 1) Shifting token number (10.1: 1) @@ -50636,19 +47885,16 @@ -> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) Entering state 20 Stack now 0 6 8 20 -Return for a new token: Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 20 4 -Return for a new token: Reading a token Next token is token number (10.6: 2) Shifting token number (10.6: 2) @@ -50659,13 +47905,11 @@ -> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 -Return for a new token: Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) Entering state 20 Stack now 0 6 8 20 4 12 20 -Return for a new token: Reading a token Next token is token number (10.10: 3) Shifting token number (10.10: 3) @@ -50676,7 +47920,6 @@ -> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 -Return for a new token: Reading a token Next token is token ')' (10.11: ) Reducing stack by rule 8 (line 113): @@ -50697,7 +47940,6 @@ -> $$ = nterm exp (10.5-11: -1) Entering state 29 Stack now 0 6 8 20 29 -Return for a new token: Reading a token Next token is token '=' (10.13: ) Reducing stack by rule 8 (line 113): @@ -50711,7 +47953,6 @@ Shifting token '=' (10.13: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (10.15: 2) Shifting token number (10.15: 2) @@ -50722,7 +47963,6 @@ -> $$ = nterm exp (10.15: 2) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): @@ -50748,7 +47988,6 @@ -> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) @@ -50765,7 +48004,6 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token number (12.1: 2) Shifting token number (12.1: 2) @@ -50776,13 +48014,11 @@ -> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token number (12.3: 2) Shifting token number (12.3: 2) @@ -50793,13 +48029,11 @@ -> $$ = nterm exp (12.3: 2) Entering state 33 Stack now 0 6 8 
24 33 -Return for a new token: Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) Entering state 24 Stack now 0 6 8 24 33 24 -Return for a new token: Reading a token Next token is token number (12.5: 3) Shifting token number (12.5: 3) @@ -50810,7 +48044,6 @@ -> $$ = nterm exp (12.5: 3) Entering state 33 Stack now 0 6 8 24 33 24 33 -Return for a new token: Reading a token Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 125): @@ -50832,7 +48065,6 @@ Shifting token '=' (12.7: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (12.9-11: 256) Shifting token number (12.9-11: 256) @@ -50843,7 +48075,6 @@ -> $$ = nterm exp (12.9-11: 256) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 102): @@ -50869,13 +48100,11 @@ -> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 -Return for a new token: Reading a token Next token is token number (13.2: 2) Shifting token number (13.2: 2) @@ -50886,13 +48115,11 @@ -> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 -Return for a new token: Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) Entering state 24 Stack now 0 6 4 12 24 -Return for a new token: Reading a token Next token is token number (13.4: 2) Shifting token number (13.4: 2) @@ -50903,7 +48130,6 @@ -> $$ = nterm exp (13.4: 2) Entering state 33 Stack now 0 6 4 12 24 33 -Return for a new token: Reading a token Next token is token ')' (13.5: ) Reducing stack by rule 12 (line 125): @@ -50924,13 +48150,11 @@ -> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 -Return for a new token: Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) Entering state 24 Stack now 0 6 8 24 -Return for a new token: Reading a token Next token is token number (13.7: 3) Shifting token number (13.7: 3) @@ -50941,7 +48165,6 @@ -> $$ = nterm exp (13.7: 3) Entering state 33 Stack now 0 6 8 24 33 -Return for a new token: Reading a token Next token is token '=' (13.9: ) Reducing stack by rule 12 (line 125): @@ -50955,7 +48178,6 @@ Shifting token '=' (13.9: ) Entering state 19 Stack now 0 6 8 19 -Return for a new token: Reading a token Next token is token number (13.11-12: 64) Shifting token number (13.11-12: 64) @@ -50966,7 +48188,6 @@ -> $$ = nterm exp (13.11-12: 64) Entering state 28 Stack now 0 6 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): @@ -50992,7 +48213,6 @@ -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (14.1: ) @@ -51001,875 +48221,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 143): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 - | (#) + (#) = 2222 -./calc.at:1354: $PREPARSER ./calc input -stderr: -./calc.at:1353: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | 1 2 -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is 
token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -input: -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1353: $PREPARSER ./calc input -./calc.at:1357: cat stderr -stderr: -./calc.at:1354: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next 
token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -input: -input: -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1355: $PREPARSER ./calc input -491. calc.at:1363: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1363: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -syntax error: invalid character: '#' -Error: popping token '+' (1.1: ) -Stack now 0 4 12 -Error: popping nterm exp (1.1: 1) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.1: ) -Error: discarding token "invalid token" (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Error: popping token error (1.1: ) -Stack now 0 4 -Shifting token error (1.1: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - | (1 + #) = 1111 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1354: $PREPARSER ./calc input -./calc.at:1357: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -51888,7 +48240,7 @@ Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -51932,7 +48284,7 @@ Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -51955,7 +48307,7 @@ Stack now 0 8 21 4 12 21 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -51980,7 +48332,7 @@ Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -51989,7 +48341,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -52039,7 +48391,7 @@ Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -52048,7 +48400,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -52091,7 +48443,7 @@ Stack now 0 8 21 4 12 22 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -52124,7 +48476,7 @@ Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -52133,7 +48485,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -52188,19 +48540,6 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: cat stderr -stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -52241,48 +48580,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -52299,389 +48654,106 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 
-Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 120): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 144): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Stack now 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) 
-Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 +Stack now 0 4 12 21 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: 
) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) 
+ $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -52698,21 +48770,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | 1//2 -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1362: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1353: cat stderr -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -52723,16 +48780,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: + | 1 2 +./calc.at:1357: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -52747,30 +48797,28 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1354: cat stderr +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1355: cat stderr -./calc.at:1357: cat stderr - | (1 + 1) / (1 - 1) +./calc.at:1354: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1353: $PREPARSER ./calc input +stderr: +stdout: Starting parse Entering state 0 Stack now 0 @@ -52784,22 +48832,19 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 +input: + | (!!) + (1 2) = 1 +./calc.at:1355: $PREPARSER ./calc input +./types.at:139: ./check +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1354: cat stderr stderr: Starting parse Entering state 0 @@ -52810,150 +48855,139 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 141): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: 
) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Stack now 0 8 23 32 +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1353: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1357: $PREPARSER ./calc input - | (#) + (#) = 2222 - | (# + 1) = 1111 -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1354: $PREPARSER ./calc input -stderr: -./calc.at:1358: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -52963,6 +48997,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | (1 + 1) / (1 - 1) +./calc.at:1354: $PREPARSER ./calc input stderr: stderr: Starting parse @@ -52974,139 +49011,140 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 Stack now 0 4 12 21 30 Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 Stack now 0 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 27 Stack now 0 4 12 27 -Reducing stack by rule 13 (line 118): +Reducing stack by rule 13 (line 138): $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) Entering state 23 Stack now 0 8 23 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Stack now 0 8 23 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 23 4 12 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) +Next token is token '-' (1.14: ) +Shifting token '-' 
(1.14: ) Entering state 20 Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token ')' (1.1: ) -Reducing stack by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 Stack now 0 8 23 4 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 27 Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) Entering state 32 Stack now 0 8 23 32 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: cat stderr Starting parse Entering state 0 Stack now 0 @@ -53125,7 +49163,7 @@ Shifting token '!' (1.3: ) Entering state 16 Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): +Reducing stack by rule 16 (line 141): $1 = token '!' (1.2: ) $2 = token '!' 
(1.3: ) Stack now 0 4 @@ -53137,7 +49175,7 @@ Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-3: ) $3 = token ')' (1.4: ) @@ -53184,7 +49222,7 @@ Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.8: ) $2 = token error (1.9-11: ) $3 = token ')' (1.12: ) @@ -53193,7 +49231,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-4: 1111) $2 = token '+' (1.6: ) $3 = nterm exp (1.8-12: 1111) @@ -53247,7 +49285,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -53257,464 +49295,122 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 12 21 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is 
token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token 
number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1358: cat stderr -./calc.at:1363: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token 
is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 23 4 12 20 1 Reducing stack by rule 5 (line 101): $1 = token number (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering 
state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -53731,8 +49427,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1353: "$PERL" -pi -e 'use strict; +input: +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -53742,125 +49438,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1//2 +./calc.at:1357: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 19 1 +Stack 
now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | error -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./calc.at:1354: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -53871,141 +49480,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1357: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1353: cat stderr -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1355: cat stderr -./calc.at:1354: cat stderr -stderr: -484. 
calc.at:1353: ok -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -input: -input: - | (- *) + (1 2) = 1 -input: -./calc.at:1357: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1354: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1355: $PREPARSER ./calc input -stderr: -stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +stderr: +stdout: +./calc.at:1354: cat stderr +./calc.at:1358: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -54015,490 +49523,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.17: 1) 
-Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering 
state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: -stderr: -./calc.at:1360: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1358: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +input: +485. 
calc.at:1354: ok +./calc.at:1358: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) + | (- *) + (1 2) = 1 +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1357: cat stderr stderr: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1358: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -54518,7 +49574,7 @@ Shifting token error (1.4: ) Entering state 9 Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): +Reducing stack by rule 15 (line 140): $1 = token '-' (1.2: ) $2 = token error (1.4: ) Stack now 0 4 @@ -54537,7 +49593,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -54584,7 +49640,7 @@ Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.9: ) $2 = token error (1.10-12: ) $3 = token ')' (1.13: ) @@ -54593,7 +49649,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-13: 1111) @@ -54647,119 +49703,12 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./calc.at:1355: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1354: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 = 2 = 3 -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: cat stderr -stderr: -input: -./calc.at:1354: cat stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1360: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1357: cat stderr -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | error +./calc.at:1357: $PREPARSER ./calc input stderr: -input: -input: stderr: - | (* *) + (*) + (*) - | (# + 1) = 1111 Starting parse Entering state 0 Stack now 0 @@ -54773,11 +49722,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -54788,11 +49739,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -54803,6 +49756,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '=' (1.11: ) Reducing stack by rule 9 (line 114): @@ -54824,6 +49778,7 @@ Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.13: 7) Shifting token number (1.13: 7) @@ -54834,6 +49789,7 @@ -> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): @@ -54858,6 +49814,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (2.1: 1) Shifting token number (2.1: 1) @@ -54868,11 +49825,13 @@ -> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) Entering state 21 Stack now 0 6 8 21 +Return for a new token: 
Reading a token Next token is token number (2.5: 2) Shifting token number (2.5: 2) @@ -54883,16 +49842,19 @@ -> $$ = nterm exp (2.5: 2) Entering state 30 Stack now 0 6 8 21 30 +Return for a new token: Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) Entering state 22 Stack now 0 6 8 21 30 22 +Return for a new token: Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 21 30 22 2 +Return for a new token: Reading a token Next token is token number (2.10: 3) Shifting token number (2.10: 3) @@ -54903,6 +49865,7 @@ -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 21 30 22 2 10 +Return for a new token: Reading a token Next token is token '=' (2.12: ) Reducing stack by rule 11 (line 124): @@ -54931,11 +49894,13 @@ Shifting token '=' (2.12: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (2.15: 5) Shifting token number (2.15: 5) @@ -54946,6 +49911,7 @@ -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 11 (line 124): @@ -54978,6 +49944,7 @@ -> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) @@ -54994,11 +49961,13 @@ -> $$ = nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token number (4.2: 1) Shifting token number (4.2: 1) @@ -55009,11 +49978,13 @@ -> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 +Return for a new token: Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) Entering state 24 Stack now 0 6 2 10 24 +Return for a new token: Reading a token Next token is token number (4.4: 2) Shifting token number (4.4: 2) @@ -55024,6 +49995,7 @@ -> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 +Return for a new token: Reading a token Next token is token '=' (4.6: ) Reducing stack by rule 12 (line 125): @@ -55044,11 +50016,13 @@ Shifting token '=' (4.6: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (4.9: 1) Shifting token number (4.9: 1) @@ -55059,6 +50033,7 @@ -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 11 (line 124): @@ -55091,16 +50066,19 @@ -> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 +Return for a new token: Reading a token Next token is token number (5.3: 1) Shifting token number (5.3: 1) @@ -55111,6 +50089,7 @@ -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 +Return for a new token: Reading a token Next 
token is token ')' (5.4: ) Reducing stack by rule 11 (line 124): @@ -55130,11 +50109,13 @@ -> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (5.6: 2) Shifting token number (5.6: 2) @@ -55145,6 +50126,7 @@ -> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (5.8: ) Reducing stack by rule 12 (line 125): @@ -55158,6 +50140,7 @@ Shifting token '=' (5.8: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (5.10: 1) Shifting token number (5.10: 1) @@ -55168,6 +50151,7 @@ -> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): @@ -55193,6 +50177,7 @@ -> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) @@ -55209,21 +50194,25 @@ -> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 +Return for a new token: Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 +Return for a new token: Reading a token Next token is token number (7.4: 1) Shifting token number (7.4: 1) @@ -55234,6 +50223,7 @@ -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 +Return for a new token: Reading a token Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 124): @@ -55260,11 +50250,13 @@ Shifting token '=' (7.6: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (7.9: 1) Shifting token number (7.9: 1) @@ -55275,6 +50267,7 @@ -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 11 (line 124): @@ -55307,6 +50300,7 @@ -> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) @@ -55323,6 +50317,7 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (9.1: 1) Shifting token number (9.1: 1) @@ -55333,11 +50328,13 @@ -> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.5: 2) Shifting token number (9.5: 2) @@ -55348,6 +50345,7 @@ -> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '-' (9.7: ) Reducing stack by rule 8 (line 113): @@ -55361,6 +50359,7 @@ Shifting token '-' (9.7: ) Entering state 20 Stack 
now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.9: 3) Shifting token number (9.9: 3) @@ -55371,6 +50370,7 @@ -> $$ = nterm exp (9.9: 3) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (9.11: ) Reducing stack by rule 8 (line 113): @@ -55384,11 +50384,13 @@ Shifting token '=' (9.11: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (9.14: 4) Shifting token number (9.14: 4) @@ -55399,6 +50401,7 @@ -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 11 (line 124): @@ -55431,6 +50434,7 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (10.1: 1) Shifting token number (10.1: 1) @@ -55441,16 +50445,19 @@ -> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 20 4 +Return for a new token: Reading a token Next token is token number (10.6: 2) Shifting token number (10.6: 2) @@ -55461,11 +50468,13 @@ -> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 +Return for a new token: Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) Entering state 20 Stack now 0 6 8 20 4 12 20 +Return for a new token: Reading a token Next token is token number (10.10: 3) Shifting token number (10.10: 3) @@ -55476,6 +50485,7 @@ -> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 +Return for a new token: Reading a token Next token is token ')' (10.11: ) Reducing stack by rule 8 (line 113): @@ -55496,6 +50506,7 @@ -> $$ = nterm exp (10.5-11: -1) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (10.13: ) Reducing stack by rule 8 (line 113): @@ -55509,6 +50520,7 @@ Shifting token '=' (10.13: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (10.15: 2) Shifting token number (10.15: 2) @@ -55519,6 +50531,7 @@ -> $$ = nterm exp (10.15: 2) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): @@ -55544,6 +50557,7 @@ -> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) @@ -55560,6 +50574,7 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (12.1: 2) Shifting token number (12.1: 2) @@ -55570,11 +50585,13 @@ -> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (12.3: 2) Shifting token number (12.3: 2) @@ -55585,11 +50602,13 @@ -> $$ = nterm exp (12.3: 2) Entering state 33 Stack 
now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) Entering state 24 Stack now 0 6 8 24 33 24 +Return for a new token: Reading a token Next token is token number (12.5: 3) Shifting token number (12.5: 3) @@ -55600,6 +50619,7 @@ -> $$ = nterm exp (12.5: 3) Entering state 33 Stack now 0 6 8 24 33 24 33 +Return for a new token: Reading a token Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 125): @@ -55621,6 +50641,7 @@ Shifting token '=' (12.7: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (12.9-11: 256) Shifting token number (12.9-11: 256) @@ -55631,6 +50652,7 @@ -> $$ = nterm exp (12.9-11: 256) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 102): @@ -55656,11 +50678,13 @@ -> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token number (13.2: 2) Shifting token number (13.2: 2) @@ -55671,11 +50695,13 @@ -> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 +Return for a new token: Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) Entering state 24 Stack now 0 6 4 12 24 +Return for a new token: Reading a token Next token is token number (13.4: 2) Shifting token number (13.4: 2) @@ -55686,6 +50712,7 @@ -> $$ = nterm exp (13.4: 2) Entering state 33 Stack now 0 6 4 12 24 33 +Return for a new token: Reading a token Next token is token ')' (13.5: ) Reducing stack by rule 12 (line 125): @@ -55706,11 +50733,13 @@ -> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (13.7: 3) Shifting token number (13.7: 3) @@ -55721,6 +50750,7 @@ -> $$ = nterm exp (13.7: 3) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (13.9: ) Reducing stack by rule 12 (line 125): @@ -55734,6 +50764,7 @@ Shifting token '=' (13.9: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (13.11-12: 64) Shifting token number (13.11-12: 64) @@ -55744,6 +50775,7 @@ -> $$ = nterm exp (13.11-12: 64) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): @@ -55769,6 +50801,7 @@ -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (14.1: ) @@ -55777,56 +50810,18 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) - | (1 + 1) / (1 - 1) -./calc.at:1355: $PREPARSER ./calc input -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1354: $PREPARSER ./calc input stderr: +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -stderr: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -55836,258 +50831,26 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 140): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = 
nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Error: popping token error (1.2-4: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 @@ -56097,7 +50860,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -56115,80 +50878,73 @@ Entering state 4 Stack now 0 8 21 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) Stack now 0 8 21 4 -Shifting token error (1.10: ) +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -56196,7 +50952,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) + $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -56214,33 +50970,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -492. calc.at:1364: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %defines api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1364: - if "$POSIXLY_CORRECT_IS_EXPORTED"; then - sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y - else - mv calc.y.tmp calc.y - fi - - -stderr: -./calc.at:1358: cat stderr -stderr: stderr: stderr: +stdout: Starting parse Entering state 0 Stack now 0 @@ -56254,11 +50986,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -56269,11 +51003,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -56284,6 +51020,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '=' (1.11: ) Reducing stack by rule 9 (line 114): @@ -56305,6 +51042,7 @@ Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.13: 7) Shifting token number (1.13: 7) @@ -56315,6 +51053,7 @@ -> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 102): @@ -56339,6 +51078,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (2.1: 1) Shifting token number (2.1: 1) @@ -56349,11 +51089,13 @@ -> $$ = nterm exp (2.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '+' (2.3: ) Shifting token '+' (2.3: ) Entering state 21 Stack now 0 6 8 21 +Return for a new token: Reading a token Next token is token number (2.5: 2) Shifting token number (2.5: 2) @@ -56364,16 +51106,19 @@ -> $$ = nterm exp (2.5: 2) Entering state 30 Stack now 0 6 8 21 30 +Return for a new token: Reading a token Next token is token '*' (2.7: ) Shifting token '*' (2.7: ) Entering state 22 Stack now 0 6 8 21 30 22 +Return for a new token: Reading a token Next token is token '-' (2.9: ) Shifting token '-' (2.9: ) Entering state 2 Stack now 0 6 8 21 30 22 2 +Return for a new token: Reading a token Next token is token number (2.10: 3) Shifting token number (2.10: 3) @@ -56384,6 +51129,7 @@ -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 21 30 22 2 10 +Return for a new token: Reading a token Next token is token '=' (2.12: ) Reducing stack by rule 11 (line 124): @@ -56412,11 +51158,13 @@ Shifting token '=' (2.12: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (2.14: ) Shifting token '-' (2.14: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (2.15: 5) Shifting token number (2.15: 5) @@ -56427,6 +51175,7 @@ -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (2.16-3.0: ) Reducing stack by rule 11 (line 124): @@ -56459,6 +51208,7 @@ -> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (3.1-4.0: ) Shifting token '\n' (3.1-4.0: ) @@ -56475,11 +51225,13 @@ -> $$ = 
nterm input (1.1-4.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '-' (4.1: ) Shifting token '-' (4.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token number (4.2: 1) Shifting token number (4.2: 1) @@ -56490,11 +51242,13 @@ -> $$ = nterm exp (4.2: 1) Entering state 10 Stack now 0 6 2 10 +Return for a new token: Reading a token Next token is token '^' (4.3: ) Shifting token '^' (4.3: ) Entering state 24 Stack now 0 6 2 10 24 +Return for a new token: Reading a token Next token is token number (4.4: 2) Shifting token number (4.4: 2) @@ -56505,6 +51259,7 @@ -> $$ = nterm exp (4.4: 2) Entering state 33 Stack now 0 6 2 10 24 33 +Return for a new token: Reading a token Next token is token '=' (4.6: ) Reducing stack by rule 12 (line 125): @@ -56525,11 +51280,13 @@ Shifting token '=' (4.6: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (4.8: ) Shifting token '-' (4.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (4.9: 1) Shifting token number (4.9: 1) @@ -56540,6 +51297,7 @@ -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (4.10-5.0: ) Reducing stack by rule 11 (line 124): @@ -56572,16 +51330,19 @@ -> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '(' (5.1: ) Shifting token '(' (5.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token '-' (5.2: ) Shifting token '-' (5.2: ) Entering state 2 Stack now 0 6 4 2 +Return for a new token: Reading a token Next token is token number (5.3: 1) Shifting token number (5.3: 1) @@ -56592,6 +51353,7 @@ -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 +Return for a new token: Reading a token Next token is token ')' (5.4: ) Reducing stack by rule 11 (line 124): @@ -56611,11 +51373,13 @@ -> $$ = nterm exp (5.1-4: -1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (5.5: ) Shifting token '^' (5.5: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (5.6: 2) Shifting token number (5.6: 2) @@ -56626,6 +51390,7 @@ -> $$ = nterm exp (5.6: 2) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (5.8: ) Reducing stack by rule 12 (line 125): @@ -56639,6 +51404,7 @@ Shifting token '=' (5.8: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (5.10: 1) Shifting token number (5.10: 1) @@ -56649,6 +51415,7 @@ -> $$ = nterm exp (5.10: 1) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (5.11-6.0: ) Reducing stack by rule 6 (line 102): @@ -56674,6 +51441,7 @@ -> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (6.1-7.0: ) Shifting token '\n' (6.1-7.0: ) @@ -56690,21 +51458,25 @@ -> $$ = nterm input (1.1-7.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '-' (7.1: ) Shifting token '-' (7.1: ) Entering state 2 Stack now 0 6 2 +Return for a new token: Reading a token Next token is token '-' (7.2: ) Shifting token '-' (7.2: ) Entering state 2 Stack now 0 6 2 2 +Return 
for a new token: Reading a token Next token is token '-' (7.3: ) Shifting token '-' (7.3: ) Entering state 2 Stack now 0 6 2 2 2 +Return for a new token: Reading a token Next token is token number (7.4: 1) Shifting token number (7.4: 1) @@ -56715,6 +51487,7 @@ -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 +Return for a new token: Reading a token Next token is token '=' (7.6: ) Reducing stack by rule 11 (line 124): @@ -56741,11 +51514,13 @@ Shifting token '=' (7.6: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (7.8: ) Shifting token '-' (7.8: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (7.9: 1) Shifting token number (7.9: 1) @@ -56756,6 +51531,7 @@ -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (7.10-8.0: ) Reducing stack by rule 11 (line 124): @@ -56788,6 +51564,7 @@ -> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (8.1-9.0: ) Shifting token '\n' (8.1-9.0: ) @@ -56804,6 +51581,7 @@ -> $$ = nterm input (1.1-9.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (9.1: 1) Shifting token number (9.1: 1) @@ -56814,11 +51592,13 @@ -> $$ = nterm exp (9.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (9.3: ) Shifting token '-' (9.3: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.5: 2) Shifting token number (9.5: 2) @@ -56829,6 +51609,7 @@ -> $$ = nterm exp (9.5: 2) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '-' (9.7: ) Reducing stack by rule 8 (line 113): @@ -56842,6 +51623,7 @@ Shifting token '-' (9.7: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token number (9.9: 3) Shifting token number (9.9: 3) @@ -56852,6 +51634,7 @@ -> $$ = nterm exp (9.9: 3) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (9.11: ) Reducing stack by rule 8 (line 113): @@ -56865,11 +51648,13 @@ Shifting token '=' (9.11: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token '-' (9.13: ) Shifting token '-' (9.13: ) Entering state 2 Stack now 0 6 8 19 2 +Return for a new token: Reading a token Next token is token number (9.14: 4) Shifting token number (9.14: 4) @@ -56880,6 +51665,7 @@ -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 19 2 10 +Return for a new token: Reading a token Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 11 (line 124): @@ -56912,6 +51698,7 @@ -> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (10.1: 1) Shifting token number (10.1: 1) @@ -56922,16 +51709,19 @@ -> $$ = nterm exp (10.1: 1) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '-' (10.3: ) Shifting token '-' (10.3: ) Entering state 20 Stack now 0 6 8 20 +Return for a new token: Reading a token Next token is token '(' (10.5: ) Shifting token '(' (10.5: ) Entering state 4 Stack now 0 6 8 20 4 +Return for a new token: Reading a token Next token is token number (10.6: 2) Shifting token number (10.6: 2) @@ -56942,11 
+51732,13 @@ -> $$ = nterm exp (10.6: 2) Entering state 12 Stack now 0 6 8 20 4 12 +Return for a new token: Reading a token Next token is token '-' (10.8: ) Shifting token '-' (10.8: ) Entering state 20 Stack now 0 6 8 20 4 12 20 +Return for a new token: Reading a token Next token is token number (10.10: 3) Shifting token number (10.10: 3) @@ -56957,6 +51749,7 @@ -> $$ = nterm exp (10.10: 3) Entering state 29 Stack now 0 6 8 20 4 12 20 29 +Return for a new token: Reading a token Next token is token ')' (10.11: ) Reducing stack by rule 8 (line 113): @@ -56977,6 +51770,7 @@ -> $$ = nterm exp (10.5-11: -1) Entering state 29 Stack now 0 6 8 20 29 +Return for a new token: Reading a token Next token is token '=' (10.13: ) Reducing stack by rule 8 (line 113): @@ -56990,6 +51784,7 @@ Shifting token '=' (10.13: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (10.15: 2) Shifting token number (10.15: 2) @@ -57000,6 +51795,7 @@ -> $$ = nterm exp (10.15: 2) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (10.16-11.0: ) Reducing stack by rule 6 (line 102): @@ -57025,6 +51821,7 @@ -> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '\n' (11.1-12.0: ) Shifting token '\n' (11.1-12.0: ) @@ -57041,6 +51838,7 @@ -> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token number (12.1: 2) Shifting token number (12.1: 2) @@ -57051,11 +51849,13 @@ -> $$ = nterm exp (12.1: 2) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (12.2: ) Shifting token '^' (12.2: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (12.3: 2) Shifting token number (12.3: 2) @@ -57066,11 +51866,13 @@ -> $$ = nterm exp (12.3: 2) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '^' (12.4: ) Shifting token '^' (12.4: ) Entering state 24 Stack now 0 6 8 24 33 24 +Return for a new token: Reading a token Next token is token number (12.5: 3) Shifting token number (12.5: 3) @@ -57081,6 +51883,7 @@ -> $$ = nterm exp (12.5: 3) Entering state 33 Stack now 0 6 8 24 33 24 33 +Return for a new token: Reading a token Next token is token '=' (12.7: ) Reducing stack by rule 12 (line 125): @@ -57102,6 +51905,7 @@ Shifting token '=' (12.7: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (12.9-11: 256) Shifting token number (12.9-11: 256) @@ -57112,6 +51916,7 @@ -> $$ = nterm exp (12.9-11: 256) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (12.12-13.0: ) Reducing stack by rule 6 (line 102): @@ -57137,11 +51942,13 @@ -> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Next token is token '(' (13.1: ) Shifting token '(' (13.1: ) Entering state 4 Stack now 0 6 4 +Return for a new token: Reading a token Next token is token number (13.2: 2) Shifting token number (13.2: 2) @@ -57152,11 +51959,13 @@ -> $$ = nterm exp (13.2: 2) Entering state 12 Stack now 0 6 4 12 +Return for a new token: Reading a token Next token is token '^' (13.3: ) Shifting token '^' (13.3: ) Entering state 24 Stack now 0 6 4 12 24 +Return for a new token: Reading a token Next token is token number (13.4: 2) Shifting 
token number (13.4: 2) @@ -57167,6 +51976,7 @@ -> $$ = nterm exp (13.4: 2) Entering state 33 Stack now 0 6 4 12 24 33 +Return for a new token: Reading a token Next token is token ')' (13.5: ) Reducing stack by rule 12 (line 125): @@ -57187,11 +51997,13 @@ -> $$ = nterm exp (13.1-5: 4) Entering state 8 Stack now 0 6 8 +Return for a new token: Reading a token Next token is token '^' (13.6: ) Shifting token '^' (13.6: ) Entering state 24 Stack now 0 6 8 24 +Return for a new token: Reading a token Next token is token number (13.7: 3) Shifting token number (13.7: 3) @@ -57202,6 +52014,7 @@ -> $$ = nterm exp (13.7: 3) Entering state 33 Stack now 0 6 8 24 33 +Return for a new token: Reading a token Next token is token '=' (13.9: ) Reducing stack by rule 12 (line 125): @@ -57215,6 +52028,7 @@ Shifting token '=' (13.9: ) Entering state 19 Stack now 0 6 8 19 +Return for a new token: Reading a token Next token is token number (13.11-12: 64) Shifting token number (13.11-12: 64) @@ -57225,6 +52039,7 @@ -> $$ = nterm exp (13.11-12: 64) Entering state 28 Stack now 0 6 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 102): @@ -57250,6 +52065,7 @@ -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (14.1: ) @@ -57258,6 +52074,104 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./types.at:139: $PREPARSER ./test +stderr: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +input: + | 1 2 +./calc.at:1358: $PREPARSER ./calc input +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +490. calc.at:1362: testing Calculator %no-lines api.pure parse.error=detailed %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+./calc.at:1362: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + +======== Testing with C++ standard flags: '' +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +input: +./calc.at:1357: cat stderr + | (* *) + (*) + (*) +./calc.at:1355: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -57292,7 +52206,7 @@ Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -57327,7 +52241,7 @@ Shifting token ')' (1.11: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.9: ) $2 = token error (1.10: ) $3 = token ')' (1.11: ) @@ -57336,7 +52250,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) $3 = nterm exp (1.9-11: 1111) @@ -57370,7 +52284,7 @@ Shifting token ')' (1.17: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.15: ) $2 = token error (1.16: ) $3 = token ')' (1.17: ) @@ -57379,7 +52293,7 @@ Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1-11: 2222) $2 = token '+' (1.13: ) $3 = nterm exp (1.15-17: 1111) @@ -57409,7 +52323,23 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | 1 = 2 = 3 +./calc.at:1357: $PREPARSER ./calc input +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -57419,113 +52349,123 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -57533,7 +52473,7 @@ Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -57551,146 +52491,89 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 
-Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | - | +1 -./calc.at:1358: $PREPARSER ./calc input -./calc.at:1364: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: cat stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: -./calc.at:1354: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -57700,8 +52583,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1355: "$PERL" -pi -e 'use strict; + | 1//2 +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1355: cat stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -57711,35 +52596,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1357: cat stderr stderr: Starting parse Entering state 0 @@ -57754,34 +52610,26 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1354: cat stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: cat stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: cat stderr input: -485. calc.at:1354: ok - | 1 + 2 * 3 + !+ ++ -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1358: cat stderr -stderr: stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -57795,13 +52643,26 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Return for a new token: +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) +Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 + | 1 + 2 * 3 + !+ ++ +./calc.at:1355: $PREPARSER ./calc input +stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -57847,7 +52708,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -57855,7 +52716,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -57876,28 +52737,27 @@ Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): +Reducing stack by rule 17 (line 142): $1 = token '!' (1.13: ) $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1358: $PREPARSER ./calc /dev/null -./calc.at:1355: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | + | +1 stderr: -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -57943,7 +52803,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -57951,7 +52811,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -57972,111 +52832,26 @@ Shifting token '+' (1.14: ) Entering state 14 Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): +Reducing stack by rule 17 (line 142): $1 = token '!' 
(1.13: ) $2 = token '+' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1355: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +stderr: +./calc.at:1358: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -58086,153 +52861,142 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1362: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./types.at:139: ./check +input: +input: +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -./calc.at:1360: cat stderr - -input: -stderr: | 1 + 2 * 3 + !- ++ -./calc.at:1357: $PREPARSER ./calc input + | error +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1355: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token 
number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 139): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 30 22 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 143): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: Starting parse Entering state 0 @@ -58279,7 +53043,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -58287,7 +53051,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -58308,14 +53072,21 @@ Shifting token '-' (1.14: ) Entering state 13 Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Reducing stack by rule 18 (line 143): $1 = token '!' (1.13: ) $2 = token '-' (1.14: ) Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -58325,10 +53096,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1//2 -./calc.at:1360: $PREPARSER ./calc input -stderr: ./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -58339,9 +53106,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1357: cat stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1355: cat stderr +./calc.at:1357: $PREPARSER ./calc /dev/null stderr: -./calc.at:1364: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS ./calc.at:1358: cat stderr +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 + | 1 + 2 * 3 + !* ++ +./calc.at:1355: $PREPARSER ./calc input +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -58387,7 +53179,7 @@ Stack now 0 8 21 30 22 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): +Reducing stack by rule 9 (line 120): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -58395,7 +53187,7 @@ Entering state 30 Stack now 0 8 21 30 Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): +Reducing stack by rule 7 (line 118): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -58412,16 +53204,32 @@ Entering state 5 Stack now 0 8 21 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 + | 1 = 2 = 3 +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -58436,22 +53244,133 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 120): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 144): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stdout: +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./types.at:139: $PREPARSER ./test +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Return for a new token: +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1355: cat stderr -./calc.at:1357: "$PERL" -pi -e 'use strict; +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1357: cat stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -58461,14 +53380,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -493. calc.at:1367: testing Calculator parse.error=custom ... -./calc.at:1367: mv calc.y.tmp calc.y - -input: -./calc.at:1367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1358: $PREPARSER ./calc input stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: cat stderr Starting parse Entering state 0 Stack now 0 @@ -58482,25 +53396,41 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1357: cat stderr input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1357: $PREPARSER ./calc input +======== Testing with C++ standard flags: '' stderr: - | (1 + 1) / (1 - 1) -./calc.at:1355: $PREPARSER ./calc input +input: Starting parse Entering state 0 Stack now 0 @@ -58509,7 +53439,6 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token ')' (1.2: ) 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' @@ -58527,19 +53456,16 @@ -> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.6: ) Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token number (1.7: 1) Shifting token number (1.7: 1) @@ -58550,13 +53476,11 @@ -> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 21 4 12 -Return for a new token: Reading a token Next token is token '+' (1.9: ) Shifting token '+' (1.9: ) Entering state 21 Stack now 0 8 21 4 12 21 -Return for a new token: Reading a token Next token is token number (1.11: 1) Shifting token number (1.11: 1) @@ -58567,7 +53491,6 @@ -> $$ = nterm exp (1.11: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 -Return for a new token: Reading a token Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): @@ -58581,7 +53504,6 @@ Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 4 12 21 -Return for a new token: Reading a token Next token is token number (1.15: 1) Shifting token number (1.15: 1) @@ -58592,7 +53514,6 @@ -> $$ = nterm exp (1.15: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 -Return for a new token: Reading a token Next token is token '+' (1.17: ) Reducing stack by rule 7 (line 112): @@ -58606,7 +53527,6 @@ Shifting token '+' (1.17: ) Entering state 21 Stack now 0 8 21 4 12 21 -Return for a new token: Reading a token Next token is token ')' (1.18: ) 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' @@ -58628,7 +53548,6 @@ -> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 112): @@ -58642,13 +53561,11 @@ Shifting token '+' (1.20: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.22: ) Shifting token '(' (1.22: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token '*' (1.23: ) 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
@@ -58662,7 +53579,6 @@ Shifting token error (1.23: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token '*' (1.25: ) Error: discarding token '*' (1.25: ) @@ -58671,7 +53587,6 @@ Shifting token error (1.23-25: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token '*' (1.27: ) Error: discarding token '*' (1.27: ) @@ -58680,7 +53595,6 @@ Shifting token error (1.23-27: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) @@ -58693,7 +53607,6 @@ -> $$ = nterm exp (1.22-28: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '+' (1.30: ) Reducing stack by rule 7 (line 112): @@ -58707,13 +53620,11 @@ Shifting token '+' (1.30: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.32: ) Shifting token '(' (1.32: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token number (1.33: 1) Shifting token number (1.33: 1) @@ -58724,13 +53635,11 @@ -> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 21 4 12 -Return for a new token: Reading a token Next token is token '*' (1.35: ) Shifting token '*' (1.35: ) Entering state 22 Stack now 0 8 21 4 12 22 -Return for a new token: Reading a token Next token is token number (1.37: 2) Shifting token number (1.37: 2) @@ -58741,7 +53650,6 @@ -> $$ = nterm exp (1.37: 2) Entering state 31 Stack now 0 8 21 4 12 22 31 -Return for a new token: Reading a token Next token is token '*' (1.39: ) Reducing stack by rule 9 (line 114): @@ -58755,7 +53663,6 @@ Shifting token '*' (1.39: ) Entering state 22 Stack now 0 8 21 4 12 22 -Return for a new token: Reading a token Next token is token '*' (1.41: ) 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -58773,7 +53680,6 @@ Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) @@ -58786,7 +53692,6 @@ -> $$ = nterm exp (1.32-42: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 112): @@ -58800,7 +53705,6 @@ Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.46: 1) Shifting token number (1.46: 1) @@ -58811,7 +53715,6 @@ -> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): @@ -58837,7 +53740,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -58846,10 +53748,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -58859,6 +53758,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: $PREPARSER ./calc input +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -58868,122 +53773,102 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 
4 11 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -59001,93 +53886,7 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1357: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = 
nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1358: cat stderr Starting parse Entering state 0 Stack now 0 @@ -59096,7 +53895,6 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token ')' (1.2: ) 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' @@ -59114,19 +53912,16 @@ -> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.6: ) Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token number (1.7: 1) Shifting token number (1.7: 1) @@ -59137,13 +53932,11 @@ -> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 21 4 12 -Return for a new token: Reading a token Next token is token '+' (1.9: ) Shifting token '+' (1.9: ) Entering state 21 Stack now 0 8 21 4 12 21 -Return for a new token: Reading a token Next token is token number (1.11: 1) Shifting token number (1.11: 1) @@ -59154,7 +53947,6 @@ -> $$ = nterm exp (1.11: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 -Return for a new token: Reading a token Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): @@ -59168,7 +53960,6 @@ Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 4 12 21 -Return for a new token: Reading a token Next token is token number (1.15: 1) Shifting token number (1.15: 1) @@ -59179,7 +53970,6 @@ -> $$ = nterm exp (1.15: 1) Entering state 30 Stack now 0 8 21 4 12 21 30 -Return for a new token: Reading a token Next token is token '+' (1.17: ) Reducing stack by rule 7 (line 112): @@ -59193,7 +53983,6 @@ Shifting token '+' (1.17: ) Entering state 21 Stack now 0 8 21 4 12 21 -Return for a new token: Reading a token Next token is token ')' (1.18: ) 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
@@ -59215,7 +54004,6 @@ -> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 112): @@ -59229,13 +54017,11 @@ Shifting token '+' (1.20: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.22: ) Shifting token '(' (1.22: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token '*' (1.23: ) 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -59249,7 +54035,6 @@ Shifting token error (1.23: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token '*' (1.25: ) Error: discarding token '*' (1.25: ) @@ -59258,7 +54043,6 @@ Shifting token error (1.23-25: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token '*' (1.27: ) Error: discarding token '*' (1.27: ) @@ -59267,7 +54051,6 @@ Shifting token error (1.23-27: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) @@ -59280,7 +54063,6 @@ -> $$ = nterm exp (1.22-28: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '+' (1.30: ) Reducing stack by rule 7 (line 112): @@ -59294,13 +54076,11 @@ Shifting token '+' (1.30: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.32: ) Shifting token '(' (1.32: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token number (1.33: 1) Shifting token number (1.33: 1) @@ -59311,13 +54091,11 @@ -> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 21 4 12 -Return for a new token: Reading a token Next token is token '*' (1.35: ) Shifting token '*' (1.35: ) Entering state 22 Stack now 0 8 21 4 12 22 -Return for a new token: Reading a token Next token is token number (1.37: 2) Shifting token number (1.37: 2) @@ -59328,7 +54106,6 @@ -> $$ = nterm exp (1.37: 2) Entering state 31 Stack now 0 8 21 4 12 22 31 -Return for a new token: Reading a token Next token is token '*' (1.39: ) Reducing stack by rule 9 (line 114): @@ -59342,7 +54119,6 @@ Shifting token '*' (1.39: ) Entering state 22 Stack now 0 8 21 4 12 22 -Return for a new token: Reading a token Next token is token '*' (1.41: ) 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -59360,7 +54136,6 @@ Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) @@ -59373,7 +54148,6 @@ -> $$ = nterm exp (1.32-42: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 112): @@ -59387,7 +54161,6 @@ Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.46: 1) Shifting token number (1.46: 1) @@ -59398,7 +54171,6 @@ -> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): @@ -59424,7 +54196,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -59433,6 +54204,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -59442,122 +54215,102 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 118): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 138): +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 118): + 
$1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 119): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 138): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 121): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -59574,90 +54327,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1360: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next 
token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: "$PERL" -pi -e 'use strict; + | + | +1 +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -59667,6 +54340,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: cat stderr ./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -59677,8 +54379,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1357: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -59688,40 +54416,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1358: cat stderr -./calc.at:1355: cat stderr -stderr: -./calc.at:1357: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS input: -486. calc.at:1355: ok -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: cat stderr | (!!) 
+ (1 2) = 1 -input: -./calc.at:1358: $PREPARSER ./calc input - | (#) + (#) = 2222 ./calc.at:1357: $PREPARSER ./calc input +./calc.at:1358: cat stderr +input: stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -stderr: + | (1 + #) = 1111 +./calc.at:1358: $PREPARSER ./calc /dev/null +./calc.at:1355: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -59730,13 +54434,11 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token '!' (1.2: ) Shifting token '!' (1.2: ) Entering state 5 Stack now 0 4 5 -Return for a new token: Reading a token Next token is token '!' (1.3: ) Shifting token '!' (1.3: ) @@ -59749,7 +54451,6 @@ Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.4: ) Shifting token ')' (1.4: ) @@ -59762,19 +54463,16 @@ -> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.6: ) Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.8: ) Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token number (1.9: 1) Shifting token number (1.9: 1) @@ -59785,7 +54483,6 @@ -> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 21 4 12 -Return for a new token: Reading a token Next token is token number (1.11: 2) 1.11: syntax error, unexpected number @@ -59801,7 +54498,6 @@ Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token ')' (1.12: ) Shifting token ')' (1.12: ) @@ -59814,7 +54510,6 @@ -> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 112): @@ -59828,7 +54523,6 @@ Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) @@ -59839,7 +54533,6 @@ -> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): @@ -59865,7 +54558,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -59874,7 +54566,19 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 ./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -59884,102 +54588,79 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 
Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -59996,9 +54677,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - +stdout: Starting parse Entering state 0 Stack now 0 @@ -60007,13 +54688,11 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token Next token is token '!' (1.2: ) Shifting token '!' (1.2: ) Entering state 5 Stack now 0 4 5 -Return for a new token: Reading a token Next token is token '!' (1.3: ) Shifting token '!' (1.3: ) @@ -60026,7 +54705,6 @@ Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.4: ) Shifting token ')' (1.4: ) @@ -60039,19 +54717,16 @@ -> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '+' (1.6: ) Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token Next token is token '(' (1.8: ) Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: Reading a token Next token is token number (1.9: 1) Shifting token number (1.9: 1) @@ -60062,7 +54737,6 @@ -> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 21 4 12 -Return for a new token: Reading a token Next token is token number (1.11: 2) 1.11: syntax error, unexpected number @@ -60078,7 +54752,6 @@ Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token Next token is token ')' (1.12: ) Shifting token ')' (1.12: ) @@ -60091,7 +54764,6 @@ -> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 112): @@ -60105,7 +54777,6 @@ Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) @@ -60116,7 +54787,6 @@ -> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): @@ -60142,7 +54812,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -60151,7 +54820,30 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1360: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +stderr: +stderr: +./calc.at:1360: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -60161,7 +54853,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +stdout: Starting parse Entering state 0 Stack now 0 @@ -60171,1338 +54863,70 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 
(line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: cat stderr -./calc.at:1358: cat stderr -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -input: - | (- *) + (1 2) = 1 -./calc.at:1358: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1360: $PREPARSER ./calc input -stderr: -494. calc.at:1368: testing Calculator parse.error=custom %locations api.prefix={calc} ... -./calc.at:1357: cat stderr -./calc.at:1368: mv calc.y.tmp calc.y - -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Return for a new token: -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now 
at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Return for a new token: -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Return for a new token: -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now 
at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1357: $PREPARSER ./calc input -stderr: -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1358: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm 
input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1358: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: cat stderr -./calc.at:1368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -input: - | - | +1 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: - | (# + 1) = 1111 -./calc.at:1357: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: cat stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 Next token is token '\n' (1.15-2.0: ) @@ -61528,1845 +54952,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 - | 1 + 2 * 3 + !+ ++ -./calc.at:1358: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 
-Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1360: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1357: $PREPARSER ./calc input -./calc.at:1360: $PREPARSER ./calc /dev/null -stderr: -input: -stderr: - | 1 + 2 * 3 + !- ++ -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -stderr: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 131): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1357: cat stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -./calc.at:1358: cat stderr -input: -./calc.at:1360: cat stderr -./types.at:139: $PREPARSER ./test - | (1 + 1) / (1 - 1) -./calc.at:1357: $PREPARSER ./calc input -stderr: -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token 
'\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1358: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1360: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: 
) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token 
'*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stdout: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Return for a new token: -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Return for a new token: -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Return for a new token: -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 132): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1362: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1357: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./calc.at:1362: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stdout: +./types.at:139: ./check ./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -63377,8 +54963,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1367: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' ./calc.at:1357: cat stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -63393,22 +54979,7 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1367: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -487. 
calc.at:1357: ok -stderr: -./calc.at:1358: cat stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -63418,6 +54989,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1358: cat stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1357: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1355: cat stderr Starting parse Entering state 0 Stack now 0 @@ -64435,31 +56014,164 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1360: cat stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) 
+Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1367: $PREPARSER ./calc input - | (#) + (#) = 2222 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1358: $PREPARSER ./calc input +input: stderr: stderr: - -stderr: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1355: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -64468,112 +56180,130 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' Reading a token -Shifting token error (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 -Return for a new token: Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 -Return for a new token: -1.8: syntax error: invalid character: '#' Reading a token -Shifting token error (1.8: ) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) Stack now 0 8 21 4 -Shifting token error (1.8: ) +Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 -Return for a new token: Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 -Return for a new token: Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' (1.15: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is 
token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -64582,7 +56312,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -64591,8 +56320,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: Starting parse Entering state 0 Stack now 0 @@ -65610,9 +57337,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (!!) 
+ (1 2) = 1 -./calc.at:1360: $PREPARSER ./calc input +./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: stderr: Starting parse @@ -65623,9 +57348,8 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' Reading a token +1.2: syntax error: invalid character: '#' Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -65636,99 +57360,65 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Return for a new token: -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Return for a new token: -1.8: syntax error: invalid character: '#' Reading a token -Shifting token error (1.8: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 -Return for a new token: +Stack now 0 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Return for a new token: -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = 
nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -65737,7 +57427,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -65746,8 +57435,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -65756,279 +57444,326 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 21 4 12 +Return for a new token: Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Return for a new token: +Reading a token +Next token 
is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Return for a new token: +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Return for a new token: +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '=' (1.14: ) +Next token is token '+' (1.20: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line 
(1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 2 -./calc.at:1362: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 129): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Stack now 0 8 21 4 12 +Return for a new token: Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Return for a new token: +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Return for a new token: +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Return for a new token: +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) Stack now 0 8 21 4 -Shifting token error (1.9-11: ) +Shifting token error (1.33-41: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -66037,6 +57772,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -66045,46 +57781,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 2 -./calc.at:1367: $PREPARSER ./calc input -stderr: -495. calc.at:1369: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-stdout: -stderr: -./calc.at:1358: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -stderr: -./calc.at:1369: mv calc.y.tmp calc.y - -stdout: -./calc.at:1363: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -input: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -66094,21 +57794,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1358: $PREPARSER ./calc input -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -stderr: -./calc.at:1363: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - Starting parse Entering state 0 Stack now 0 @@ -66117,60 +57802,51 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' -Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Reducing stack by rule 14 (line 139): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -66181,7 +57857,6 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 -Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -66206,7 +57881,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -66215,30 +57889,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1360: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 +input: stderr: Starting parse Entering state 0 @@ -66250,85 +57901,324 @@ Stack now 0 4 Return for a new token: Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 Return for a new token: -1.6: syntax error: invalid character: '#' Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Return for a new token: +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Return for a new token: +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 
+Stack now 0 8 21 4 12 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Return for a new token: +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Return for a new token: Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Return for a new token: Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Return for a new token: +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Return for a new token: +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Return for a new token: +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Return for a new token: +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Return for a new token: Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Return for a new token: Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -66346,23 +58236,31 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1363: $PREPARSER ./calc input -input: -./calc.at:1362: "$PERL" -pi -e 'use strict; + | 1 2 +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1357: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -66372,7 +58270,7 @@ : "syntax error, 
unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1355: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -66382,1028 +58280,742 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1360: $PREPARSER ./calc input -stderr: -./calc.at:1367: cat stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1355: cat stderr +./calc.at:1358: cat stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 + | (* *) + (*) + (*) +./calc.at:1357: $PREPARSER ./calc input +input: +input: +stderr: + | (!!) + (1 2) = 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token 
"number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Return for a new token: Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 +Stack now 0 8 +Return for a new token: Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = 
nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 21 4 12 +Return for a new token: Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 
83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) 
-Entering state 29 -Stack now 0 6 8 20 29 +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 +Stack now 0 8 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 19 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + 
$1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1360: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 8 20 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 20 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 +Stack now 0 4 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 8 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Stack now 0 4 12 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 139): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 
8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 @@ -67412,130 +59024,133 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Return for a new token: Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 128): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Stack now 0 8 21 4 12 +Return for a new token: Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.17: 1) --> $$ = nterm 
exp (1.17: 1) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -67544,6 +59159,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -67552,26 +59168,113 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: cat stderr -./calc.at:1358: cat stderr -input: +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1//2 -./calc.at:1367: $PREPARSER ./calc input +./calc.at:1360: $PREPARSER ./calc input stderr: +./calc.at:1357: cat stderr +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: cat stderr +./calc.at:1358: cat stderr stderr: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1357: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 stderr: +input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 @@ -67581,12 +59284,12 @@ Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 @@ -67596,990 +59299,294 @@ Entering state 22 Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 101): +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 
7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm 
line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 +Stack now 0 8 21 Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1355: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1358: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Stack now 0 4 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 
18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 
-Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 4 12 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 
2) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 6 8 20 4 +Stack now 0 8 23 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 -Stack now 0 6 8 20 4 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 20 -Stack now 0 6 8 20 4 12 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 29 -Stack now 0 6 8 20 4 12 20 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp 
(10.1-15: 2) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 +Stack now 0 8 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' 
(13.6: ) -Entering state 24 -Stack now 0 6 8 24 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Now at end of input. -Shifting token "end of input" (14.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Stack now 0 @@ -68588,11 +59595,13 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token '-' (1.2: ) Shifting token '-' (1.2: ) Entering state 2 Stack now 0 4 2 +Return for a new token: Reading a token Next token is token '*' (1.4: ) 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -68613,6 +59622,7 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) @@ -68625,16 +59635,19 @@ -> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token number (1.10: 1) Shifting token number (1.10: 1) @@ -68645,6 +59658,7 @@ -> $$ = nterm exp (1.10: 1) Entering state 12 Stack now 0 8 21 4 12 +Return for a new token: Reading a token Next token is token number (1.12: 2) 1.12: syntax error, unexpected number @@ -68660,6 +59674,7 @@ Shifting token error (1.10-12: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.13: ) Shifting token ')' (1.13: ) @@ -68672,6 +59687,7 @@ -> $$ = nterm exp (1.9-13: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '=' (1.15: ) Reducing stack by rule 7 (line 112): @@ -68685,6 +59701,7 @@ Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.17: 1) Shifting token number (1.17: 1) @@ -68695,6 +59712,7 @@ -> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): @@ -68720,6 +59738,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -68728,60 +59747,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 -./calc.at:1362: $PREPARSER ./calc input -input: -stderr: -input: - | (# + 1) = 1111 -./calc.at:1358: $PREPARSER ./calc input -stderr: -./calc.at:1369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS - | 1 2 -./calc.at:1363: $PREPARSER ./calc input -stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -68790,84 +59757,123 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -1.2: syntax error: invalid character: '#' Reading a token -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 Reading a token Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 118): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 138): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) + $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number 
(1.11-14: 1111) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 23 4 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 119): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 138): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 121): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -68876,7 +59882,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -68885,33 +59890,21 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: ./calc.at:1360: cat stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1355: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1357: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -68922,83 +59915,140 @@ Entering state 4 Stack now 0 4 Return for a new token: -1.2: syntax error: invalid character: '#' Reading a token -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Return for a new token: Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Return for a new token: Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Return for a new token: +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Return for a new token: Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Return for a new token: Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp 
(1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -69030,48 +60080,181 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1355: cat stderr +input: + | error +./calc.at:1360: $PREPARSER ./calc input +stderr: +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +486. 
calc.at:1355: ok stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -stdout: -input: - | (* *) + (*) + (*) -./calc.at:1367: cat stderr -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1368: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -stderr: -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: cat stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -69081,6 +60264,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +input: +./calc.at:1357: cat stderr + + | (* *) + (*) + (*) +./calc.at:1358: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -69089,6 +60287,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token '*' (1.2: ) 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -69102,6 +60301,7 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) @@ -69110,6 +60310,7 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) @@ -69122,16 +60323,19 @@ -> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token '*' (1.10: ) 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -69145,6 +60349,7 @@ Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) @@ -69157,6 +60362,7 @@ -> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): @@ -69170,11 +60376,13 @@ Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.15: ) Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token '*' (1.16: ) 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
@@ -69188,6 +60396,7 @@ Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) @@ -69200,6 +60409,7 @@ -> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 112): @@ -69224,6 +60434,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -69232,18 +60443,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: -./calc.at:1363: "$PERL" -pi -e 'use strict; +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -69253,28 +60455,92 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1368: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1362: cat stderr -./calc.at:1358: cat stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1357: $PREPARSER ./calc input stderr: stderr: -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1363: cat stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 
8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -69283,6 +60549,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token '*' (1.2: ) 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -69296,6 +60563,7 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) @@ -69304,6 +60572,7 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) @@ -69316,16 +60585,19 @@ -> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.7: ) Shifting token '+' (1.7: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.9: ) Shifting token '(' (1.9: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token '*' (1.10: ) 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -69339,6 +60611,7 @@ Shifting token error (1.10: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) @@ -69351,6 +60624,7 @@ -> $$ = nterm exp (1.9-11: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 112): @@ -69364,11 +60638,13 @@ Shifting token '+' (1.13: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '(' (1.15: ) Shifting token '(' (1.15: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: Reading a token Next token is token '*' (1.16: ) 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' @@ -69382,6 +60658,7 @@ Shifting token error (1.16: ) Entering state 11 Stack now 0 8 21 4 11 +Return for a new token: Reading a token Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) @@ -69394,6 +60671,7 @@ -> $$ = nterm exp (1.15-17: 1111) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 7 (line 112): @@ -69418,6 +60696,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -69426,440 +60705,154 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1368: $PREPARSER ./calc input -input: -stderr: -stderr: - | (1 + # + 1) = 1111 -input: -./calc.at:1358: $PREPARSER ./calc input -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -stdout: +./calc.at:1360: cat stderr stderr: - | error -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1360: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' +Stack now 0 8 21 Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Return for a new token: -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.5: 2) +Shifting token 
number (1.5: 2) Entering state 1 -Stack now 0 8 19 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' - | 1//2 -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr -Starting parse -Entering state 0 -Stack now 0 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 21 5 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1364: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +491. calc.at:1363: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1363: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1367: cat stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -1.6: syntax error: invalid character: '#' -Reading a token -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Return for a new token: -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.3: ) +Shifting token '=' 
(1.3: ) Entering state 19 Stack now 0 8 19 -Return for a new token: Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 -Return for a new token: -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: cat stderr -stderr: -input: -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1358: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 2 -input: -./calc.at:1368: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !+ ++ - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1364: $PREPARSER ./calc input - | 1 = 2 = 3 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1367: $PREPARSER ./calc input -stderr: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1358: cat stderr -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1363: "$PERL" -pi -e 'use strict; +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -69869,1025 +60862,55 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1357: cat stderr +./calc.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13: 7) 
-Shifting token "number" (1.13: 7) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' 
(2.16-3.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): 
- $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting 
token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 
8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 20 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 20 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: 
-1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = 
nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next 
token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (14.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 + | 1 + 2 * 3 + !+ ++ +input: +./calc.at:1358: $PREPARSER ./calc input + | (#) + (#) = 2222 +stderr: +./calc.at:1357: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -70902,11 +60925,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -70917,11 +60942,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -70932,6 +60959,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -70953,11 +60981,13 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token Next token is token '+' (1.14: ) Shifting token '+' (1.14: ) @@ -70969,17 +60999,17 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -input: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) - | (1 + 1) / (1 - 1) -./calc.at:1358: $PREPARSER ./calc input -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: cat stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1362: cat stderr -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -70988,134 +61018,103 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Return for a new token: -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Return for a new token: +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 -Return for a new token: Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 -Stack now 0 8 23 4 -Return for a new token: +Stack now 0 8 21 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Return for a new token: +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Return for a new token: +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token 
'(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Return for a new token: -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Return for a new token: + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -71124,7 +61123,6 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 -Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -71133,68 +61131,82 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr -stderr: +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1360: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Stack now 0 8 21 30 +Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 Next token is token '=' 
(1.11: ) @@ -71202,960 +61214,47 @@ Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Stack now 0 6 8 21 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Stack now 0 6 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Stack now 0 6 8 21 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Stack now 0 6 8 21 30 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 21 30 22 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Stack now 0 6 8 21 30 22 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 21 30 22 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Stack now 0 6 8 21 30 22 31 -Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Stack now 0 6 8 21 30 -Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' 
(2.14: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Stack now 0 6 2 10 24 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Stack now 0 6 2 10 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Stack now 0 6 2 10 24 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next 
token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) 
-Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) 
-Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 19 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Stack now 0 6 8 19 2 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 19 2 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 111): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Stack now 0 6 8 19 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Stack now 0 6 8 20 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 20 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Stack now 0 6 8 20 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 20 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Stack now 0 6 8 20 4 12 20 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 
1 -Stack now 0 6 8 20 4 12 20 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Stack now 0 6 8 20 4 12 20 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 20 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Stack now 0 6 8 20 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Stack now 0 6 8 20 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Stack now 0 6 8 24 33 24 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Stack now 0 6 8 24 33 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Stack now 0 6 8 24 33 24 
33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Stack now 0 6 8 24 33 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Stack now 0 6 4 12 24 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Stack now 0 6 4 12 24 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Stack now 0 6 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Stack now 0 6 8 24 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Stack now 0 6 8 24 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Stack now 0 6 8 24 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) 
-Shifting token '=' (13.9: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Stack now 0 6 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Stack now 0 6 18 -Reducing stack by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 @@ -72170,11 +61269,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -72185,11 +61286,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -72200,6 +61303,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -72221,11 +61325,13 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' 
(1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token Next token is token '+' (1.14: ) Shifting token '+' (1.14: ) @@ -72237,207 +61343,8 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) - | 1 = 2 = 3 -./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Return for a new token: -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Return for a new token: -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Return for a new token: -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Stack now 0 4 12 21 30 -Return for a new token: -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Return for a new token: -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 -Return for a new token: -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 -Return for a new token: -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 23 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 -Return for a new token: -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 -Return for a new token: -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 23 4 12 20 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 -Return for a new token: -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 113): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 126): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 -Return for a new token: -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 115): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) 
-1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Return for a new token: -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1362: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -input: -input: -input: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1358: "$PERL" -pi -e 'use strict; +./calc.at:1358: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -72447,91 +61354,45 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1//2 -./calc.at:1368: $PREPARSER ./calc input - | error -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1367: cat stderr input: -stderr: - | 1 2 -./calc.at:1364: $PREPARSER ./calc input -stderr: - | 1 + 2 * 3 + !- ++ -stderr: + | + | +1 ./calc.at:1360: $PREPARSER ./calc input -./calc.at:1358: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 +input: +./calc.at:1357: cat stderr stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1358: $PREPARSER 
./calc input Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 stderr: +input: + | (1 + #) = 1111 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -72545,11 +61406,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -72560,11 +61423,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -72575,6 +61440,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -72596,11 +61462,13 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) @@ -72612,39 +61480,135 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -488. calc.at:1358: ok -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | - | +1 +stderr: +./calc.at:1363: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 Stack now 0 -./calc.at:1367: $PREPARSER ./calc input -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -stderr: -./calc.at:1362: cat stderr +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -72658,11 +61622,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -72673,11 +61639,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -72688,6 +61656,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -72709,11 +61678,13 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) @@ -72725,43 +61696,6 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token "number" (1.3: 2) -Stack now 0 -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1368: cat stderr -stderr: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -input: - ./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -72772,168 +61706,130 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | - | +1 -./calc.at:1362: $PREPARSER ./calc input -./calc.at:1363: cat stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: stderr: -./calc.at:1364: cat stderr - | error -./calc.at:1368: $PREPARSER ./calc input +./calc.at:1360: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1367: cat stderr -./calc.at:1360: cat stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: $PREPARSER ./calc /dev/null - | 1 = 2 = 3 -input: -stderr: -./calc.at:1363: $PREPARSER ./calc input -stderr: -stderr: -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 96): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 - | 1//2 -./calc.at:1364: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 +1.6: syntax error: invalid character: 
'#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 - | 1 + 2 * 3 + !* ++ -./calc.at:1360: $PREPARSER ./calc input -stderr: +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: $PREPARSER ./calc /dev/null +./calc.at:1358: cat stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Now at end of input. +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 -stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; +input: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -72943,13 +61839,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -496. calc.at:1370: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full ... -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: mv calc.y.tmp calc.y - -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1358: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -72963,11 +61856,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -72978,11 +61873,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -72993,6 +61890,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -73014,11 +61912,13 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' 
(1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token Next token is token '*' (1.14: ) Shifting token '*' (1.14: ) @@ -73031,82 +61931,6 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1362: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -stderr: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Stack now 0 8 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1362: $PREPARSER ./calc /dev/null -input: -stderr: - | 1 = 2 = 3 -./calc.at:1368: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -73115,9 +61939,20 @@ 1.1: syntax error, unexpected end of file Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 -./calc.at:1367: cat stderr -stderr: +./calc.at:1357: cat stderr +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: Starting parse Entering state 0 Stack now 0 @@ -73131,11 +61966,13 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '+' (1.3: ) Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) @@ -73146,11 +61983,13 @@ -> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 +Return for a new token: Reading a token Next token is token '*' (1.7: ) Shifting token '*' (1.7: ) Entering state 22 Stack now 0 8 21 30 22 +Return for a new token: Reading a token Next token is token number (1.9: 3) Shifting token number (1.9: 3) @@ -73161,6 +62000,7 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Stack now 0 8 21 30 22 31 +Return for a new token: Reading a token Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 114): @@ -73182,11 +62022,13 @@ Shifting token '+' (1.11: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token Next token is token '!' (1.13: ) Shifting token '!' (1.13: ) Entering state 5 Stack now 0 8 21 5 +Return for a new token: Reading a token Next token is token '*' (1.14: ) Shifting token '*' (1.14: ) @@ -73199,136 +62041,9 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1363: cat stderr -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file (1.1: ) -Stack now 0 -./calc.at:1364: cat stderr -input: -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -./calc.at:1360: cat stderr - | - | +1 -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1368: cat stderr -stderr: -input: -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 - | error -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1364: $PREPARSER ./calc input -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 - | (#) + (#) = 2222 -stderr: -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1362: cat stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 - | - | +1 -./calc.at:1368: $PREPARSER ./calc input -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1357: $PREPARSER ./calc input stderr: -./calc.at:1370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS Starting parse Entering state 0 Stack now 0 @@ -73350,90 +62065,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) 
Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -73450,44 +62139,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch 
for summaries/d' stderr -./calc.at:1367: cat stderr -input: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1362: $PREPARSER ./calc input -stderr: -stderr: -stderr: -input: -stderr: -./calc.at:1363: "$PERL" -pi -e 'use strict; +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: cat stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -73497,7 +62151,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -73519,90 +62174,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 -Stack now 0 8 21 4 11 26 +Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) 
Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -73619,16 +62248,21 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Stack now 0 - | (!!) + (1 2) = 1 -./calc.at:1367: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1358: cat stderr +stderr: +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: Starting parse Entering state 0 Stack now 0 @@ -73946,39 +62580,11 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | (#) + (#) = 2222 +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1357: cat stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1363: cat stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./calc.at:1363: $PREPARSER ./calc /dev/null -./calc.at:1368: $PREPARSER ./calc /dev/null -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 Starting parse Entering state 0 Stack now 0 @@ -73987,73 +62593,206 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: +1.2: syntax error: invalid character: '#' Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Stack now 0 8 21 +Return for a new token: Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Stack now 0 8 21 4 +Return for a new token: +1.8: syntax error: invalid character: '#' Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 8 21 30 +Return for a new token: Reading a token -Next token is token '+' (1.13: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token number (1.15: 1) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 
6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.15: 1) Shifting token number (1.15: 1) Entering state 1 Stack now 0 8 21 4 12 21 1 @@ -74296,102 +63035,11 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -./calc.at:1364: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: cat stderr -stderr: -stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1367: cat stderr -input: input: - | 1 = 2 = 3 -./calc.at:1364: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1357: $PREPARSER ./calc input stderr: - | (1 + #) = 1111 -./calc.at:1362: cat stderr -./calc.at:1360: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 stderr: -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1368: cat stderr Starting parse Entering state 0 Stack now 0 @@ -74432,48 +63080,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -74490,71 +63154,148 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1367: $PREPARSER ./calc input -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1363: cat stderr -stderr: -stderr: -input: -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -stderr: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) 
+ (1 2) = 1 -./calc.at:1362: $PREPARSER ./calc input +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Return for a new token: +1.2: syntax error: invalid character: '#' +Reading a token +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Return for a new token: +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Return for a new token: +1.8: syntax error: invalid character: '#' +Reading a token +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Return for a new token: +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 19 -Error: popping token '=' (1.3: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 
-Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -74596,48 +63337,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 101): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: 
) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -74654,6 +63411,36 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: cat stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1358: cat stderr +input: + | (!!) + (1 2) = 1 +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1357: cat stderr +input: + | (1 + #) = 1111 +stderr: +./calc.at:1358: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -74796,24 +63583,7 @@ Cleanup: popping nterm input (1.1-2.0: ) stderr: input: -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -74822,321 +63592,252 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +1.6: syntax error: invalid character: '#' +Reading a token +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (1 + 1) / (1 - 1) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1357: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 4 12 21 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 8 23 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token number (1.12: 
1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 
4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1357: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 Starting parse Entering state 0 Stack now 0 @@ -75277,38 +63978,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: cat stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | - | +1 -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1367: cat stderr -./calc.at:1360: cat stderr -stderr: -./calc.at:1362: cat stderr -stderr: Starting parse Entering state 0 Stack now 0 @@ -75317,357 +63986,257 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +1.6: syntax error: invalid character: '#' +Reading a token +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Return for a new token: Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 4 12 21 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 -Stack now 0 8 21 30 +Stack now 0 4 12 21 30 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 8 23 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token number (1.12: 
1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 8 23 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Stack now 0 8 19 28 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 
4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -input: -input: - | (!!) + (1 2) = 1 -./calc.at:1368: $PREPARSER ./calc input -input: -input: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (# + 1) = 1111 - | (- *) + (1 2) = 1 -./calc.at:1362: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1360: $PREPARSER ./calc input -./calc.at:1367: $PREPARSER ./calc input -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1363: "$PERL" -pi -e 'use strict; +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -75677,11 +64246,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1360: cat stderr +./calc.at:1357: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1358: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1360: $PREPARSER ./calc input stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: +./calc.at:1357: cat stderr Starting parse Entering state 0 Stack now 0 @@ -75830,31 +64411,13 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 +input: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1358: $PREPARSER ./calc input +487. calc.at:1357: ok +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -75863,8 +64426,9 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Reading a token +Return for a new token: 1.2: syntax error: invalid character: '#' +Reading a token Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -75875,6 +64439,7 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Error: discarding token '+' (1.4: ) @@ -75883,6 +64448,7 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token number (1.6: 1) Error: discarding token number (1.6: 1) @@ -75891,6 +64457,7 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) @@ -75903,11 +64470,13 @@ -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -75918,6 +64487,7 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -75942,6 +64512,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -75950,19 +64521,6 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: cat stderr -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -stderr: -stderr: -stderr: Starting parse Entering state 0 Stack now 0 @@ -76111,7 +64669,9 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -76121,9 +64681,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +stdout: Starting parse Entering state 0 Stack now 0 @@ -76132,8 +64691,9 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 -Reading a token +Return for a new token: 1.2: syntax error: invalid character: '#' +Reading a token Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 @@ -76144,6 +64704,7 @@ Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Error: discarding token '+' (1.4: ) @@ -76152,6 +64713,7 @@ Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token number (1.6: 1) Error: discarding token number (1.6: 1) @@ -76160,6 +64722,7 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) @@ -76172,11 +64735,13 @@ -> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.9: ) Shifting token '=' (1.9: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.11-14: 1111) Shifting token number (1.11-14: 1111) @@ -76187,6 +64752,7 @@ -> $$ = nterm exp (1.11-14: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 102): @@ -76211,6 +64777,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -76219,165 +64786,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1363: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1368: cat stderr -stderr: -./calc.at:1364: cat stderr -./calc.at:1360: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -./calc.at:1362: "$PERL" -pi -e 'use strict; + +./calc.at:1362: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1360: cat stderr +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -76387,31 +64799,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: $PREPARSER ./calc /dev/null -./calc.at:1369: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 - | (- *) + (1 2) = 1 -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1360: cat stderr -stderr: -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: cat stderr -./calc.at:1369: "$PERL" -ne ' +./calc.at:1362: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -76420,162 +64808,13 @@ || /\s$/ # No tabs. || /\t/ - )' calc.c + )' calc.c calc.h -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1367: cat stderr -stderr: input: +./calc.at:1358: cat stderr input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Stack now 0 - | (1 + # + 1) = 1111 + | (* *) + (*) + (*) +./calc.at:1360: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -76589,157 +64828,8 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1360: $PREPARSER ./calc input -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -input: -input: -stderr: - | (* *) + (*) + (*) -stderr: ./calc.at:1362: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' 
(1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token end of file (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1363: cat stderr stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -76891,25 +64981,1038 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: cat stderr input: + | (1 + # + 1) = 1111 +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (- *) + (1 2) = 1 -stderr: -./calc.at:1363: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next 
token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 +Stack now 0 6 8 21 +Reading a token +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = 
nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) 
+Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm 
exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + 
$1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) 
+Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp 
(12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 
+Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -76920,13 +66023,15 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 -Reading a token +Return for a new token: 1.6: syntax error: invalid character: '#' +Reading a token Error: popping token '+' (1.4: ) Stack now 0 4 12 Error: popping nterm exp (1.2: 1) @@ -76941,6 +66046,7 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token '+' (1.8: ) Error: discarding token '+' (1.8: ) @@ -76949,6 +66055,7 @@ Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token number (1.10: 1) Error: discarding token number (1.10: 1) @@ -76957,6 +66064,7 @@ Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) @@ -76969,11 +66077,13 @@ -> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '=' (1.13: ) Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token Next token is token number (1.15-18: 1111) Shifting token number (1.15-18: 1111) @@ -76984,6 +66094,7 @@ -> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 102): @@ -77008,6 +66119,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -77016,10 +66128,10 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +492. calc.at:1364: testing Calculator %no-lines api.pure parse.error=verbose %debug %locations %defines api.prefix={calc} %verbose %yacc %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
Starting parse Entering state 0 Stack now 0 @@ -77171,10 +66283,14 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (* *) + (*) + (*) -./calc.at:1368: $PREPARSER ./calc input -stderr: -input: +./calc.at:1364: + if "$POSIXLY_CORRECT_IS_EXPORTED"; then + sed -e '/\/\* !POSIX \*\//d' calc.y.tmp >calc.y + else + mv calc.y.tmp calc.y + fi + + stderr: Starting parse Entering state 0 @@ -77184,163 +66300,123 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Return for a new token: Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Return for a new token: +1.6: syntax error: invalid character: '#' +Reading a token +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) +Stack now 0 4 11 +Return for a new token: +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) 
Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 4 11 +Return for a new token: Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Return for a new token: +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 +Return for a new token: Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 +Return for a new token: Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1364: $PREPARSER ./calc input +stderr: ./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -77351,819 +66427,1026 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stderr: - | 1 + 2 * 3 + !- ++ -input: -stderr: -./calc.at:1367: $PREPARSER ./calc input -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 22 -Stack now 0 8 21 4 12 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = 
token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 31 -Stack now 0 8 21 4 12 22 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) 
Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1360: cat stderr -stderr: -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 8 21 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 Reading a token -Next token is token '=' 
(1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | 1 2 -./calc.at:1362: cat stderr -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr -stderr: -input: -stderr: -stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 8 21 +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 21 4 +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token 
is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) Entering state 1 -Stack now 0 8 21 4 12 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Stack now 0 8 21 4 12 21 30 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Stack now 0 8 21 4 12 21 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 21 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a 
token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.9: 3) +-> $$ = nterm 
exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 101): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 124): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 8 20 4 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 21 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 21 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack 
now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 -Stack now 0 8 21 4 +Stack now 0 6 4 Reading a token -Next 
token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 -Stack now 0 8 21 4 12 +Stack now 0 6 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 -Stack now 0 8 21 4 12 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Stack now 0 8 21 4 12 22 31 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Stack now 0 8 21 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Stack now 0 8 21 4 12 22 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 21 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 21 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 21 4 11 +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Stack now 0 8 21 30 +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 125): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 92): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of file (14.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (1 + 1) / (1 - 1) -./calc.at:1360: $PREPARSER ./calc input -input: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1368: $PREPARSER ./calc input -stderr: -./calc.at:1363: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1364: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -78173,12 +67456,149 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ +./calc.at:1360: cat stderr +stderr: +input: +stdout: + | 1 2 ./calc.at:1362: $PREPARSER ./calc input -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: cat stderr +./types.at:139: $PREPARSER ./test +input: stderr: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1360: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' 
(1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | (1 + 1) / (1 - 1) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1358: $PREPARSER ./calc input +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -78187,6 +67607,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -78197,11 +67618,13 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 +Return for a new token: Reading a token Next token is token number (1.6: 1) Shifting token number (1.6: 1) @@ -78212,6 +67635,7 @@ -> $$ = nterm exp (1.6: 1) Entering state 30 Stack now 0 4 12 21 30 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 112): @@ -78232,16 +67656,19 @@ -> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '/' (1.9: ) Shifting token '/' (1.9: ) Entering state 23 Stack now 0 8 23 +Return for a new token: Reading a token Next token is token '(' (1.11: ) Shifting token '(' (1.11: ) Entering state 4 Stack now 0 8 23 4 +Return for a new token: Reading a token Next token is token number (1.12: 1) Shifting token number (1.12: 1) @@ -78252,11 +67679,13 @@ -> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 23 4 12 +Return for a new token: Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 20 Stack now 0 8 23 4 12 20 +Return for a new token: Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) @@ -78267,6 +67696,7 @@ -> $$ = nterm exp (1.16: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 +Return for a new token: Reading a token Next token is token ')' (1.17: ) Reducing stack by rule 8 (line 113): @@ -78287,6 +67717,7 @@ -> $$ = nterm exp (1.11-17: 0) Entering state 32 Stack now 0 8 23 32 +Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 10 (line 115): @@ -78312,6 +67743,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. 
Shifting token end of file (2.1: ) @@ -78320,6 +67752,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1358: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -78400,7 +67834,9 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1360: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -78410,98 +67846,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: cat stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1367: cat stderr -stderr: -./calc.at:1364: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 114): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 112): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 130): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS Starting parse Entering state 0 Stack now 0 @@ -78510,6 +67855,7 @@ Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 +Return for a new token: Reading a token Next token is token number (1.2: 1) Shifting token number (1.2: 1) @@ -78520,11 +67866,13 @@ -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 +Return for a new token: Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Stack now 0 4 12 21 +Return for a new token: Reading a token Next token is token number (1.6: 1) Shifting token number (1.6: 1) @@ -78535,6 +67883,7 @@ -> $$ = nterm exp (1.6: 1) Entering state 30 Stack now 0 4 12 21 30 +Return for a new token: Reading a token Next token is token ')' (1.7: ) Reducing stack by rule 7 (line 112): @@ -78555,16 +67904,19 @@ -> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 +Return for a new token: Reading a token Next token is token '/' (1.9: ) Shifting token '/' (1.9: ) Entering state 23 Stack now 0 8 23 +Return for a new token: Reading a token Next token is token '(' (1.11: ) Shifting token '(' (1.11: ) Entering state 4 Stack now 0 8 23 4 +Return for a new token: Reading a token Next token is token number (1.12: 1) Shifting token number (1.12: 1) @@ -78575,11 +67927,13 @@ -> $$ = nterm exp (1.12: 1) Entering state 12 Stack now 0 8 23 4 12 +Return for a new token: Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) Entering state 20 Stack now 0 8 23 4 12 20 +Return for a new token: Reading a token Next token is token number (1.16: 1) Shifting token number (1.16: 1) @@ -78590,6 +67944,7 @@ -> $$ = nterm exp (1.16: 1) Entering state 29 Stack now 0 8 23 4 12 20 29 +Return for a new token: Reading a token Next token is token ')' (1.17: ) Reducing stack by rule 8 (line 113): @@ -78610,6 +67965,7 @@ -> $$ = nterm exp (1.11-17: 0) Entering state 32 Stack now 0 8 23 32 +Return for a new token: Reading a token Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 10 (line 115): @@ -78635,6 +67991,7 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 +Return for a new token: Reading a token Now at end of input. Shifting token end of file (2.1: ) @@ -78644,19 +68001,11 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) input: - | (* *) + (*) + (*) -input: -input: -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1369: cat stderr +./calc.at:1362: cat stderr | 1 + 2 * 3 + !- ++ -./calc.at:1368: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -input: -./calc.at:1367: $PREPARSER ./calc input -stderr: +./calc.at:1360: $PREPARSER ./calc input stderr: -./calc.at:1360: "$PERL" -pi -e 'use strict; +./calc.at:1358: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -78666,308 +68015,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - | (!!) 
+ (1 2) = 1 -input: -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 + !- ++ -stderr: -./calc.at:1362: $PREPARSER ./calc input -memory exhausted -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 @@ -79048,22 +68095,14 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1364: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1360: cat stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1//2 -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: $PREPARSER ./calc input -stderr: -memory exhausted -stderr: -stderr: +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1358: cat stderr stderr: stderr: -489. calc.at:1360: ok -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -79148,296 +68187,32 @@ Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Stack now 0 4 5 16 -Reducing stack by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 21 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Stack now 0 
1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Stack now 0 8 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 +Error: popping nterm exp (1.1: 1) Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1368: cat stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +488. 
calc.at:1358: ok +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -79448,18 +68223,36 @@ }eg ' expout || exit 77 stderr: +./calc.at:1360: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 input: -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1363: "$PERL" -pi -e 'use strict; +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -79469,134 +68262,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !* ++ -./calc.at:1368: $PREPARSER ./calc input -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -stderr: -stderr: -./calc.at:1367: cat stderr -./calc.at:1363: cat stderr -./calc.at:1362: cat stderr -1.14: memory exhausted -./calc.at:1364: cat stderr -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1370: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1369: cat stderr -stderr: -input: -input: - | (#) + (#) = 2222 -./calc.at:1367: $PREPARSER ./calc input -1.14: memory exhausted -input: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1363: $PREPARSER ./calc input -stderr: | 1 + 2 * 3 + !* ++ -./calc.at:1362: $PREPARSER ./calc input -stderr: - | (- *) + (1 2) = 1 -./calc.at:1370: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c - -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1364: $PREPARSER ./calc input +./calc.at:1360: $PREPARSER ./calc input stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | error Starting parse Entering state 0 Stack now 0 @@ -79678,187 +68347,9 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1369: $PREPARSER ./calc input -stderr: +./calc.at:1362: cat stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1368: cat stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: $PREPARSER ./calc input -syntax error: invalid character: '#' -syntax error: invalid character: '#' -497. calc.at:1371: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full parse.lac=full ... -./calc.at:1371: mv calc.y.tmp calc.y - -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -79941,247 +68432,24 @@ Stack now 0 8 21 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -input: + | error +./calc.at:1362: $PREPARSER ./calc input +stderr: +493. calc.at:1367: testing Calculator parse.error=custom ... +./calc.at:1367: mv calc.y.tmp calc.y + stderr: +./calc.at:1367: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stdout: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr -Starting parse -Entering state 0 +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Stack now 0 8 21 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 21 4 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 21 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 
!= 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (#) + (#) = 2222 -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: $PREPARSER ./calc input -stderr: -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1367: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1362: "$PERL" -pi -e 'use strict; +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -80191,10 +68459,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: ./check +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y +./calc.at:1360: cat stderr stderr: -./calc.at:1364: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +input: +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -80204,364 +68483,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stderr: -./calc.at:1364: cat stderr -./calc.at:1369: cat stderr -./calc.at:1362: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by 
rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: - | (1 + #) = 1111 -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -input: - | 1 2 -./calc.at:1370: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1368: cat stderr -./calc.at:1364: $PREPARSER ./calc input -input: -input: -stderr: | (#) + (#) = 2222 -./calc.at:1362: $PREPARSER ./calc input - | 1 = 2 = 3 -stderr: -./calc.at:1369: $PREPARSER ./calc input -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input: -stderr: -syntax error: invalid character: '#' +./calc.at:1360: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (1 + #) = 1111 -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -80679,180 +68607,12 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 21 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 21 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stderr: -./calc.at:1371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1362: cat stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: + | 1 = 2 = 3 +./calc.at:1362: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -80974,228 +68734,115 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -1.6: syntax error: invalid character: '#' -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1367: cat stderr -./calc.at:1363: cat stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1364: cat stderr -./calc.at:1370: cat stderr -./calc.at:1369: cat stderr -./calc.at:1368: cat stderr -./calc.at:1362: cat stderr -input: -input: -input: - | - | +1 - | (# + 1) = 1111 -./calc.at:1367: $PREPARSER ./calc input -./calc.at:1369: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1363: $PREPARSER ./calc input -input: -stderr: -stderr: - | (# + 1) = 1111 -input: -./calc.at:1368: $PREPARSER ./calc input -input: stderr: - | 1 + 2 * 3 + !+ ++ -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1364: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1362: $PREPARSER ./calc input -syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1370: $PREPARSER ./calc input -stderr: -stderr: +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1360: cat stderr +./calc.at:1367: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +input: +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1360: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -81294,106 +68941,13 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: stdout: -1.2: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1362: cat stderr +./types.at:139: $PREPARSER ./test +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -81492,106 +69046,38 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./types.at:139: $PREPARSER ./test +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1362: $PREPARSER ./calc input +stderr: +443. types.at:139: ok Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 21 5 14 -Reducing stack by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1368: cat stderr -./calc.at:1369: cat stderr -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1367: cat stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -81601,210 +69087,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1369: $PREPARSER ./calc /dev/null -input: -======== Testing with C++ standard flags: '' +./calc.at:1360: cat stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1362: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1370: cat stderr -input: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1363: cat stderr input: - | (1 + # + 1) = 1111 -stderr: -./calc.at:1368: $PREPARSER ./calc input -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 96): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (1 + # + 1) = 1111 -./calc.at:1367: $PREPARSER ./calc input -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -input: -1.6: syntax error: invalid character: '#' -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | error -./calc.at:1370: $PREPARSER ./calc input -input: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -syntax error: invalid character: '#' - | (#) + (#) = 2222 -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 | (# + 1) = 1111 -./calc.at:1362: $PREPARSER ./calc input -stderr: -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -1.6: syntax error: invalid character: '#' +./calc.at:1360: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Stack now 0 8 21 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Stack now 0 8 21 30 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Stack now 0 8 21 30 22 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Stack now 0 8 21 30 22 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Stack now 0 8 21 30 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a 
token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 21 5 13 -Reducing stack by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) + Starting parse Entering state 0 Stack now 0 @@ -81900,137 +69215,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) 
-Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error: invalid character: '#' -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr -stderr: -./calc.at:1369: cat stderr -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -82041,10 +69226,23 @@ }eg ' expout || exit 77 stderr: -input: - | (1 + 1) / (1 - 1) -./calc.at:1368: $PREPARSER ./calc input +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1363: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1362: cat stderr stderr: +./calc.at:1363: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1362: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Stack now 0 @@ -82140,148 +69338,32 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 21 4 -Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) -Stack now 0 8 21 4 -Shifting token error (1.8: ) -Entering state 11 -Stack now 0 8 21 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Stack now 0 8 21 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 
2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1364: cat stderr -1.11-17: error: null divisor -./calc.at:1370: cat stderr -./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1367: cat stderr -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; +1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1360: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -82292,29 +69374,9 @@ }eg ' expout || exit 77 stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -1.11-17: error: null divisor -input: -input: -input: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1367: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1363: cat stderr -stderr: - | 1 = 2 = 3 -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1362: cat stderr -error: null divisor -stderr: -./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1368: cat stderr +494. calc.at:1368: testing Calculator parse.error=custom %locations api.prefix={calc} ... 
+./calc.at:1368: mv calc.y.tmp calc.y + stderr: Starting parse Entering state 0 @@ -82360,7 +69422,7 @@ Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.11: ) +Next token is token '=' (1.11: ) Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) @@ -82368,7 +69430,7 @@ -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) @@ -82376,165 +69438,986 @@ -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Stack now 0 8 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -input: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: null divisor - | (1 + # + 1) = 1111 -./calc.at:1362: $PREPARSER ./calc input -input: -494. 
calc.at:1368: ok -stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (1 + #) = 1111 -./calc.at:1363: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 6 8 21 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 127): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Next token is token "number" (2.10: 3) +Shifting token "number" 
(2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token 
+Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 19 -Stack now 0 8 19 +Stack now 0 6 8 19 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 101): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 28 -Stack now 0 8 19 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 102): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Stack now 0 6 8 
24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 97): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 91): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 
0 6 8 19 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 
8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token 
'\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) 
+Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token end of file (2.1: ) +Shifting token "end of input" (14.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token end of file (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stdout: +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file (1.1: ) +Stack now 0 +./calc.at:1368: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1360: cat stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Stack now 0 @@ -82579,7 +70462,7 @@ Entering state 31 Stack now 0 8 21 30 22 31 Reading a token -Next token is token '+' (1.11: ) +Next token is token '=' (1.11: ) Reducing stack by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) @@ -82587,7 +70470,7 @@ -> $$ = nterm exp (1.5-9: 6) Entering state 30 Stack now 0 8 21 30 -Next token is token '+' (1.11: ) +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) @@ -82595,28 +70478,968 @@ -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 8 21 +Stack now 0 6 8 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 21 5 +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Stack now 0 6 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Stack now 0 8 21 5 15 -Reducing stack by rule 19 (line 119): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Stack now 0 8 21 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./types.at:139: ./check +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 
10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 
+Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.8: 
) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> 
$$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Stack now 0 6 8 20 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Stack now 0 6 8 20 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Stack now 0 6 8 20 4 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Stack now 0 6 8 20 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Stack now 0 6 8 20 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) 
+Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' 
(13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (1 + # + 1) = 1111 +./calc.at:1360: $PREPARSER ./calc input +./calc.at:1362: cat stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -82626,12 +71449,12 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) -> $$ = nterm exp (1.2: 1) Entering state 12 Stack now 0 4 12 @@ -82649,78 +71472,118 @@ Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) Error: popping token error (1.2-6: ) Stack now 0 4 Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack 
by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: + | 1 2 +input: +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1362: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 stderr: -./calc.at:1369: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) stderr: -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -82835,34 +71698,7 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1367: cat stderr -input: -./calc.at:1370: cat stderr - | (!!) + (1 2) = 1 -stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1369: $PREPARSER ./calc input -493. calc.at:1367: ok +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -82872,127 +71708,337 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token number (1.37: 2) +Shifting token 
number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: cat stderr -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1362: cat stderr -input: - | - | +1 -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: $PREPARSER ./calc input -input: -stderr: - | (#) + (#) = 2222 -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 - | (1 + 1) / (1 - 1) Starting parse Entering state 0 Stack now 0 @@ -83002,122 +72048,353 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 21 4 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Stack now 0 8 21 4 -Shifting token error (1.8: ) +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) 
+Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack by rule 5 (line 101): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1362: $PREPARSER ./calc input +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: cat stderr +./calc.at:1362: cat stderr +input: +./calc.at:1363: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1360: $PREPARSER ./calc input stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1368: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS Starting parse Entering state 0 Stack now 0 @@ -83259,13 +72536,11 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: cat stderr - -stderr: + | (!!) + (1 2) = 1 +input: +./calc.at:1362: $PREPARSER ./calc input stderr: -./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1369: cat stderr +./calc.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -83275,92 +72550,111 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Stack now 0 8 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 21 4 Reading a token -1.8: syntax error: invalid character: '#' -Shifting token error (1.8: ) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 -Next token is token "invalid token" (1.8: ) -Error: discarding token "invalid token" (1.8: ) -Error: popping token error (1.8: ) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 21 4 -Shifting token error (1.8: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 Stack now 0 8 21 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Stack now 0 8 21 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp 
(1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 @@ -83368,33 +72662,57 @@ Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): +Reducing stack by rule 4 (line 97): $1 = nterm exp (1.1-16: 2222) $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -498. calc.at:1374: testing Calculator %start input exp NUM api.value.type=union ... -input: -./calc.at:1374: mv calc.y.tmp calc.y - -stderr: - | (# + 1) = 1111 + | 1//2 ./calc.at:1363: $PREPARSER ./calc input -./calc.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -83536,32 +72854,8 @@ Stack now 0 6 17 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: cat stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1369: $PREPARSER ./calc input +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1362: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -83571,109 +72865,203 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 129): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 
1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: $PREPARSER ./calc /dev/null -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1364: cat stderr -499. calc.at:1375: testing Calculator %start input exp NUM api.value.type=union %locations parse.error=detailed ... -./calc.at:1375: mv calc.y.tmp calc.y - stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1360: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1360: cat stderr ./calc.at:1362: cat stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +489. calc.at:1360: ok input: +./calc.at:1363: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1362: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -83683,101 +73071,151 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 
0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: + +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1363: $PREPARSER ./calc input stderr: -490. 
calc.at:1362: 1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 - ok - | (1 + #) = 1111 -./calc.at:1364: $PREPARSER ./calc input -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: Starting parse Entering state 0 @@ -83788,96 +73226,155 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 128): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm 
exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) +Reducing stack by rule 5 (line 101): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -83887,115 +73384,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: cat stderr -./calc.at:1370: cat stderr stderr: -./calc.at:1363: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - -input: -./calc.at:1374: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -input: -input: -./calc.at:1364: "$PERL" -pi -e 'use strict; +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +./calc.at:1362: cat stderr +495. calc.at:1369: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -84005,14 +73405,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1369: mv calc.y.tmp calc.y + +input: | (* *) + (*) + (*) -./calc.at:1369: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1363: $PREPARSER ./calc input -./calc.at:1370: $PREPARSER ./calc input -stderr: -stderr: +./calc.at:1369: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1363: cat stderr stderr: Starting parse Entering state 0 @@ -84023,268 +73422,196 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-6: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-8: ) +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1364: cat stderr -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: + | 1 = 2 = 3 +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1367: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a 
token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 -Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -500. calc.at:1387: testing Calculator %glr-parser ... 
-./calc.at:1387: mv calc.y.tmp calc.y - -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1387: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1364: $PREPARSER ./calc input -./calc.at:1375: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -./calc.at:1369: cat stderr +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -84294,176 +73621,347 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): +Reducing stack by rule 14 (line 127): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Stack now 0 8 19 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Stack now 0 8 19 1 -Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Stack now 0 8 19 28 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next 
token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1370: cat stderr -./calc.at:1363: cat stderr -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1367: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.2: ) -Error: discarding token "invalid token" (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +input: + | 1 + 2 
* 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1362: cat stderr +stderr: +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1363: cat stderr +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1369: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + | + | +1 +input: +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: $PREPARSER ./calc input + | 1 2 +./calc.at:1367: $PREPARSER ./calc input +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -84473,27 +73971,558 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 + !+ ++ +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1362: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: input: -./calc.at:1369: $PREPARSER ./calc input - | (!!) + (1 2) = 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 + | 1 + 2 * 3 + !- ++ +./calc.at:1362: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1367: cat stderr +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1363: cat stderr input: -./calc.at:1370: $PREPARSER ./calc input +stderr: + | 1//2 +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1363: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 131): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: stderr: - | (1 + 1) / (1 - 1) +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: +./calc.at:1362: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stdout: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1364: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1362: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1364: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1367: cat stderr +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1362: $PREPARSER ./calc input +stderr: +./calc.at:1363: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1364: $PREPARSER ./calc input +input: +input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 114): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 132): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | error ./calc.at:1363: $PREPARSER ./calc input -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 +./calc.at:1367: $PREPARSER ./calc input +stderr: stderr: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -84503,122 +74532,298 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 4 12 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 -Stack now 0 4 12 21 1 +Stack now 0 8 21 4 12 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 -Stack now 0 4 12 21 30 +Stack now 0 8 21 4 12 21 30 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Stack now 0 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 8 21 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 -Stack now 0 8 23 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is 
token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 8 21 4 12 22 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm 
line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -84635,7 +74840,8 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1364: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -84645,168 +74851,1029 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1364: cat stderr -./calc.at:1387: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1370: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + 
$1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 6 8 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 -Stack now 0 4 12 21 1 +Stack now 0 6 8 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 30 -Stack now 0 4 12 21 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token ')' (1.7: ) +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp 
(2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token "number" (5.3: 1) 
+Shifting token "number" (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 27 -Stack now 0 4 12 27 +Stack now 0 6 4 12 27 Reducing stack by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token 
is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting 
token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 6 8 20 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 -Stack now 0 8 23 4 1 +Stack now 0 6 8 20 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 6 8 20 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 20 -Stack now 0 8 23 4 12 20 +Stack now 0 6 8 20 4 12 20 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 6 8 20 4 12 20 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) 
Entering state 29 -Stack now 0 8 23 4 12 20 29 +Stack now 0 6 8 20 4 12 20 29 Reading a token -Next token is token ')' (1.17: ) +Next token is token ')' (10.11: ) Reducing stack by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 27 -Stack now 0 8 23 4 12 27 +Stack now 0 6 8 20 4 12 27 Reducing stack by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = 
nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Stack now 0 6 4 12 27 
+Reducing stack by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" (14.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (1 + # + 1) = 1111 -input: -./calc.at:1364: $PREPARSER ./calc input +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: cat stderr stderr: -input: - | (- *) + (1 2) = 1 - | 1 + 2 * 3 + !- ++ -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1369: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -84816,95 +75883,298 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 21 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 21 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 4 12 21 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 21 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Stack now 0 4 11 26 +Stack now 0 8 21 4 11 26 Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 
+Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -84921,125 +76191,100 @@ Stack now 0 6 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1363: "$PERL" -pi -e 'use strict; - s{syntax error on token 
\[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1363: cat stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: +input: +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 8 21 Reading a token -1.6: syntax error: invalid character: '#' -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token "invalid token" (1.6: ) -Error: discarding token "invalid token" (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Stack now 0 4 11 26 -Reducing stack by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Next token is token '=' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = 
nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Stack now 0 8 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Stack now 0 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Stack now 0 8 19 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Stack now 0 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -85049,360 +76294,1050 @@ Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -491. calc.at:1363: ok -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -stderr: -./calc.at:1370: cat stderr -./calc.at:1364: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1369: cat stderr -input: -./calc.at:1364: cat stderr - | (* *) + (*) + (*) -./calc.at:1370: $PREPARSER ./calc input -stderr: -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1369: $PREPARSER ./calc input -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -input: -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1364: $PREPARSER ./calc input -1.14: memory exhausted -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 6 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 -Stack now 0 4 12 21 +Stack now 0 6 8 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 -Stack now 0 4 12 21 1 +Stack now 0 6 8 21 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 30 -Stack now 0 4 12 21 30 +Stack now 0 6 8 21 30 Reading a token -Next token is token ')' (1.7: ) +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 +Stack now 0 6 8 21 30 22 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 21 30 22 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Stack now 0 6 8 21 30 22 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 21 30 22 2 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Stack now 0 6 8 21 30 22 31 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Stack now 0 6 8 21 30 +Next token is token '=' (2.12: ) Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token 
+Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Stack now 0 6 2 10 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Stack now 0 6 2 10 24 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp 
(4.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 27 -Stack now 0 4 12 27 +Stack now 0 6 4 12 27 Reducing stack by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Stack now 0 6 8 24 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 23 4 +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Stack now 0 6 8 24 33 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token '=' (5.8: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 -Stack now 0 8 23 4 1 +Stack now 0 6 8 19 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 23 4 12 + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) +Next token is token '\n' (5.11-6.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> 
$$ = nterm exp (5.1-10: 1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token 
'\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 20 -Stack now 0 8 23 4 12 20 +Stack now 0 6 8 20 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 29 -Stack now 0 8 23 4 12 20 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token ')' (1.17: ) +Next token is token '-' (9.7: ) Reducing stack by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Stack now 0 8 23 4 12 27 -Reducing stack by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Stack now 0 6 8 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Stack now 0 6 8 19 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Stack now 0 6 8 19 2 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 19 2 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) 
+Entering state 28 +Stack now 0 6 8 19 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 25 -Stack now 0 8 25 +Stack now 0 6 8 25 Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Stack now 0 6 17 -Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stdout: -stderr: -501. calc.at:1389: testing Calculator %glr-parser %header ... -./calc.at:1371: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1389: mv calc.y.tmp calc.y - -./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.14: memory exhausted -./calc.at:1389: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1370: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Stack now 0 6 8 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 4 +Stack now 0 6 8 20 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 -Stack now 0 4 1 +Stack now 0 6 8 20 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 -Stack now 0 4 12 +Stack now 0 6 8 20 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Stack now 0 4 12 21 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 +Stack now 0 6 8 20 4 12 20 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 -Stack now 0 4 12 21 1 +Stack now 0 6 8 20 4 12 20 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 
-Stack now 0 4 12 21 30 + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 +Stack now 0 6 8 20 4 12 20 29 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Stack now 0 6 8 20 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 27 -Stack now 0 4 12 27 +Stack now 0 6 8 20 4 12 27 Reducing stack by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 +Stack now 0 6 8 20 29 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Stack now 0 8 23 +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): 
+ $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 +Stack now 0 6 8 24 33 24 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Stack now 0 6 8 24 33 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 +Stack now 0 6 8 24 33 24 33 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Stack now 0 6 8 24 33 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 -Stack now 0 8 23 4 +Stack now 0 6 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 -Stack now 0 8 23 4 1 +Stack now 0 6 4 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 -Stack now 0 8 23 4 12 +Stack now 0 6 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Stack now 0 8 23 4 12 20 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 +Stack now 0 6 4 12 24 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 -Stack now 0 8 23 4 12 20 1 +Stack now 0 6 4 12 24 1 Reducing stack by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Stack now 0 8 23 4 12 20 29 + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 +Stack now 0 6 4 12 24 33 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = 
nterm exp (1.12-16: 0) +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Stack now 0 8 23 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 27 -Stack now 0 8 23 4 12 27 +Stack now 0 6 4 12 27 Reducing stack by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 -Stack now 0 8 23 32 + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Stack now 0 6 8 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Stack now 0 6 8 24 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Stack now 0 6 8 24 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Stack now 0 6 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Stack now 0 6 18 +Reducing stack by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (14.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (#) + (#) = 2222 +./calc.at:1362: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Stack now 0 8 25 -Reducing stack by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 78): +Reducing stack by rule 1 (line 91): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token end of file (2.1: ) Entering state 17 Stack now 0 6 17 Stack now 0 6 17 -Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1371: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1364: "$PERL" -pi -e 'use strict; +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85412,390 +77347,167 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1369: cat stderr -input: -input: - | 1 + 2 * 3 + !+ ++ - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1364: cat stderr -stderr: -stderr: -input: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -492. calc.at:1364: ok - | (#) + (#) = 2222 -./calc.at:1369: $PREPARSER ./calc input -stderr: -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -input: +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' - | 1 + 2 * 3 + !- ++ +./calc.at:1363: cat stderr | 1 2 -./calc.at:1370: $PREPARSER ./calc input -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1389: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -stderr: -stderr: -./calc.at:1369: cat stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (1 + #) = 1111 -./calc.at:1369: $PREPARSER ./calc input +./calc.at:1364: $PREPARSER ./calc input stderr: +./calc.at:1367: cat stderr stderr: -1.6: syntax error: invalid character: '#' -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -502. calc.at:1390: testing Calculator %glr-parser %locations ... 
-./calc.at:1390: mv calc.y.tmp calc.y - -./calc.at:1370: cat stderr -stderr: -./calc.at:1390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -1.6: syntax error: invalid character: '#' -./calc.at:1371: cat stderr -input: -./calc.at:1369: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1370: $PREPARSER ./calc input -stderr: -1.14: memory exhausted -input: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1371: $PREPARSER ./calc input -input: -stderr: - | (# + 1) = 1111 -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1369: $PREPARSER ./calc input -1.14: memory exhausted -stderr: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1370: cat stderr -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -stderr: -stderr: -input: -1.2: syntax error: invalid character: '#' -stdout: - | (#) + (#) = 2222 -./calc.at:1374: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -./calc.at:1370: $PREPARSER ./calc input -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1371: cat stderr -./calc.at:1374: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1369: cat stderr -input: -stderr: -input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token 1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token 1.8: syntax error: invalid character: '#' - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1374: $PREPARSER ./calc input -input: - | error -stderr: - | (1 + # + 1) = 1111 -./calc.at:1369: $PREPARSER ./calc input -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1370: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) 
-stderr: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (1 + #) = 1111 -./calc.at:1370: $PREPARSER ./calc input -stderr: -stderr: -1.6: syntax error: invalid character: '#' -stderr: -1.6: syntax error: invalid character: '#' -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1374: $PREPARSER ./calc input -stderr: -./calc.at:1369: cat stderr -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -stderr: -input: -syntax error - | (1 + 1) / (1 - 1) -input: -./calc.at:1369: $PREPARSER ./calc input -stderr: - | 1 = 2 = 3 -./calc.at:1371: $PREPARSER ./calc input -1.11-17: error: null divisor -./calc.at:1370: cat stderr -stderr: -./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -stderr: -stdout: -stderr: -./types.at:139: $PREPARSER ./test -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -1.11-17: error: null divisor -stdout: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1375: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' -stderr: - | (# + 1) = 1111 -./calc.at:1370: $PREPARSER ./calc input -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1369: cat stderr -./calc.at:1375: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -1.2: syntax error: invalid character: '#' -======== Testing with C++ standard flags: '' -./calc.at:1374: cat stderr -./calc.at:1371: cat stderr -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -495. 
calc.at:1369: ok -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1375: $PREPARSER ./calc input -input: -stderr: -input: -1.2: syntax error: invalid character: '#' -stderr: - | - | +1 - | 1//2 -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error -stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1370: cat stderr - -syntax error -stderr: -input: -input: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) - | 1 2 -./calc.at:1375: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1370: $PREPARSER ./calc input -stderr: -1.3: syntax error, unexpected number -stderr: -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1371: cat stderr -stderr: +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' 
(1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) 1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 stderr: stdout: -1.6: syntax error: invalid character: '#' -503. calc.at:1391: testing Calculator %glr-parser %locations api.location.type={Span} ... -./types.at:139: $PREPARSER ./test -./calc.at:1371: $PREPARSER ./calc /dev/null -./calc.at:1391: mv calc.y.tmp calc.y - -stderr: -stderr: -./calc.at:1370: cat stderr -./calc.at:1391: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr -======== Testing with C++ standard flags: '' -input: -stderr: -input: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | (1 + 1) / (1 - 1) -./calc.at:1370: $PREPARSER ./calc input - | error input: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test + | (!!) + (1 2) = 1 +./calc.at:1363: $PREPARSER ./calc input stderr: -./calc.at:1374: $PREPARSER ./calc input - | 1//2 -1.11-17: error: null divisor -./calc.at:1375: $PREPARSER ./calc input -stderr: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1371: cat stderr -stdout: -stderr: -syntax error -./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: ./check -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1371: $PREPARSER ./calc input -1.11-17: error: null divisor -syntax error -stderr: -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1370: cat stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -496. calc.at:1370: ok -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85805,153 +77517,176 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1391: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1375: cat stderr -./calc.at:1371: cat stderr -./calc.at:1374: cat stderr -stderr: -stdout: - -./types.at:139: ./check -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -input: -input: - | error -input: -./calc.at:1375: $PREPARSER ./calc input | 1 = 2 = 3 -./calc.at:1374: $PREPARSER ./calc input -stderr: - | (!!) + (1 2) = 1 -stderr: -./calc.at:1371: $PREPARSER ./calc input -1.1: syntax error, unexpected invalid token -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -stderr: -stderr: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error, unexpected invalid token -syntax error -504. calc.at:1392: testing Calculator %glr-parser %name-prefix "calc" ... 
-./calc.at:1392: mv calc.y.tmp calc.y - -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +./calc.at:1367: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token "number" (1.3: 2) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + 
$2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) 1.1-16: error: 2222 != 1 -./calc.at:1392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1375: cat stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1371: cat stderr -input: -input: - | 1 = 2 = 3 - | (- *) + (1 2) = 1 -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1374: cat stderr -stderr: -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -1.7: syntax error, unexpected '=' -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -./calc.at:1374: $PREPARSER ./calc input -stderr: -stderr: -stderr: -1.7: syntax error, unexpected '=' -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1392: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -stderr: -syntax error -./calc.at:1371: cat stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1375: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -input: - | - | +1 -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1374: cat stderr -2.1: syntax error, unexpected '+' -./calc.at:1374: $PREPARSER ./calc /dev/null -./calc.at:1371: cat stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1362: cat stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +449. types.at:139: ok +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -85962,35 +77697,156 @@ }eg ' expout || exit 77 stderr: -syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: input: -syntax error - | 1 + 2 * 3 + !+ ++ -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1375: $PREPARSER ./calc /dev/null -stderr: -stderr: -1.1: syntax error, unexpected end of file -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: cat stderr stderr: -1.1: syntax error, unexpected end of file -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1362: $PREPARSER ./calc input +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) stderr: -./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; +stdout: +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86000,17 +77856,110 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1368: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 
+Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1374: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1371: $PREPARSER ./calc input -stderr: -./calc.at:1375: cat stderr -stderr: -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -input: -./calc.at:1387: "$PERL" -ne ' +./calc.at:1363: cat stderr + +./calc.at:1368: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -86021,29 +77970,142 @@ || /\t/ )' calc.c + | 1//2 +./calc.at:1364: $PREPARSER ./calc input stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1374: $PREPARSER ./calc input -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: $PREPARSER ./calc input stderr: +./calc.at:1367: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by 
rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +input: + | (- *) + (1 2) = 1 +./calc.at:1363: $PREPARSER ./calc input input: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1371: cat stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -86057,38 +78119,8 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1387: $PREPARSER ./calc input -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -stderr: -stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1371: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.14: memory exhausted -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86098,49 +78130,354 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 
+Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: stderr: - | 1 2 -./calc.at:1387: $PREPARSER ./calc input -1.14: memory exhausted -stderr: -syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr -./calc.at:1374: cat stderr -./calc.at:1371: cat stderr -stderr: -input: -input: -syntax error - | (!!) + (1 2) = 1 -./calc.at:1375: $PREPARSER ./calc input -input: - | (!!) + (1 2) = 1 -./calc.at:1374: $PREPARSER ./calc input -stderr: - | (#) + (#) = 2222 -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: $PREPARSER ./calc input -stderr: + | + | +1 +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: $PREPARSER ./calc input stderr: -syntax error -error: 2222 != 1 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: cat stderr stderr: +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stdout: -./types.at:139: $PREPARSER ./test -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp 
(1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +496. calc.at:1370: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full ... +input: +./calc.at:1370: mv calc.y.tmp calc.y + + | (# + 1) = 1111 +./calc.at:1362: $PREPARSER ./calc input +./calc.at:1370: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -./calc.at:1387: "$PERL" -pi -e 'use strict; +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86150,31 +78487,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -error: 2222 != 1 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -======== Testing with C++ standard flags: '' -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./calc.at:1371: cat stderr -./calc.at:1387: cat stderr -./calc.at:1375: cat stderr input: - | (1 + #) = 1111 -./calc.at:1371: $PREPARSER ./calc input -./calc.at:1374: "$PERL" -pi -e 'use strict; +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86184,58 +78499,221 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1//2 -./calc.at:1387: $PREPARSER ./calc input -input: - | (- *) + (1 2) = 1 -stderr: -./calc.at:1375: $PREPARSER ./calc input -stderr: -stderr: -1.6: syntax error: invalid character: '#' -syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr + | 1 2 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1368: $PREPARSER ./calc input stderr: +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: cat stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1363: cat stderr +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: cat stderr stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -syntax error stderr: -1.6: syntax error: invalid character: '#' +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1367: $PREPARSER ./calc /dev/null input: - | (- *) + (1 2) = 1 -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: + | error +./calc.at:1364: $PREPARSER ./calc input + | (* *) + (*) + (*) stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: cat stderr +./calc.at:1363: $PREPARSER ./calc input stderr: -./calc.at:1375: cat stderr -syntax error -syntax error -error: 2222 != 1 stderr: -./calc.at:1387: "$PERL" -pi -e 'use strict; +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86245,150 +78723,459 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./types.at:139: ./check -input: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y - | (# + 1) = 1111 -./calc.at:1371: $PREPARSER ./calc input -input: -stderr: - | (* *) + (*) + (*) -1.2: syntax error: invalid character: '#' -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1368: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: cat stderr +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 stderr: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1362: cat stderr +input: stderr: -stdout: -1.2: syntax error: invalid character: '#' stderr: -./types.at:139: $PREPARSER ./test -./calc.at:1374: cat stderr -input: + | 1//2 +./calc.at:1368: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) 1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) 1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: - | error -./calc.at:1387: $PREPARSER ./calc input -stderr: -./calc.at:1371: cat stderr -syntax error -input: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1374: $PREPARSER ./calc input -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -stderr: -./calc.at:1375: cat stderr -======== Testing with C++ standard flags: '' -syntax error -syntax error -syntax error input: -stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + # + 1) = 1111 -./calc.at:1371: $PREPARSER ./calc input -syntax error -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -1.6: syntax error: invalid character: '#' -stderr: -input: -syntax error -syntax error -syntax error - | 1 + 2 * 3 + !+ ++ -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1362: $PREPARSER ./calc input +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1367: cat stderr +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token 1.6: syntax error: invalid character: '#' -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1371: cat stderr -input: -./calc.at:1387: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1374: cat stderr -stderr: -input: -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1371: $PREPARSER ./calc input -input: -stderr: - | 1 = 2 = 3 -./calc.at:1387: $PREPARSER ./calc input -input: -stderr: -1.11-17: error: null divisor -stderr: -syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1374: $PREPARSER ./calc input -stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.11-17: error: null divisor -syntax error -stderr: -stderr: -stdout: -./calc.at:1375: "$PERL" -pi -e 'use strict; +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86398,14 +79185,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./types.at:139: ./check -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1371: cat stderr -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1375: cat stderr input: -497. calc.at:1371: ok -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86415,104 +79196,148 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1374: $PREPARSER ./calc input -stderr: stderr: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -input: -./calc.at:1390: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - - | 1 + 2 * 3 + !* ++ -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1387: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1364: cat stderr +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) stderr: +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 stderr: -1.14: memory exhausted - -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting 
token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 127): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 102): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1390: $PREPARSER ./calc input - | - | +1 -./calc.at:1387: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -1.14: memory exhausted -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: cat stderr + | 1 = 2 = 3 +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1368: cat stderr stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -syntax error +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 input: -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 2 -./calc.at:1390: $PREPARSER ./calc input -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./calc.at:1375: cat stderr -stderr: -stdout: -./calc.at:1374: cat stderr -1.3: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86522,72 +79347,326 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: -1.3: syntax error - | (#) + (#) = 2222 -input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 + | 1 + 2 * 3 + !+ ++ +./calc.at:1363: $PREPARSER ./calc input input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1389: $PREPARSER ./calc input -505. calc.at:1393: testing Calculator %glr-parser api.prefix={calc} ... -./calc.at:1393: mv calc.y.tmp calc.y - - | 1 + 2 * 3 + !* ++ -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: -stderr: -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1387: cat stderr -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -memory exhausted -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error stderr: +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1362: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: stderr: -./calc.at:1387: $PREPARSER ./calc /dev/null -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -memory exhausted +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 19 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1367: cat stderr stderr: - | 1 2 -./calc.at:1389: $PREPARSER ./calc input -syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1362: $PREPARSER ./calc input +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: -./calc.at:1375: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: 
) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86597,129 +79676,283 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error stderr: -./calc.at:1375: cat stderr -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) input: - | 1//2 -./calc.at:1390: $PREPARSER ./calc input -syntax error +./calc.at:1363: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (!!) + (1 2) = 1 +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1364: cat stderr input: stderr: -1.3: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1374: cat stderr -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -1.3: syntax error -stderr: -1.6: syntax error: invalid character: '#' -stderr: input: -stdout: - | (#) + (#) = 2222 -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1387: cat stderr -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $PREPARSER ./test -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 + | 1 = 2 = 3 +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 112): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token 
+Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 101): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 113): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 126): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 115): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 97): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 91): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token end of file (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token end of file (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | + | +1 +./calc.at:1364: $PREPARSER ./calc input stderr: -./calc.at:1387: $PREPARSER ./calc input -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1390: cat stderr +./calc.at:1368: $PREPARSER ./calc input stderr: -./calc.at:1389: cat stderr -input: -======== Testing with C++ standard flags: '' -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 - | (# + 1) = 1111 -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: $PREPARSER ./calc input +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 input: - | error -input: -stderr: -./calc.at:1390: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - | 1//2 -./calc.at:1389: $PREPARSER ./calc input -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1374: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !- ++ +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1362: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86730,58 +79963,206 @@ }eg ' expout || exit 77 stderr: -1.1: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) 
+Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1362: cat stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1375: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1375: cat stderr -./calc.at:1374: cat stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + # + 1) = 1111 -input: -./calc.at:1375: $PREPARSER ./calc input -./calc.at:1387: cat stderr - | (1 + #) = 1111 -./calc.at:1374: $PREPARSER ./calc input stderr: +./calc.at:1367: cat stderr +490. 
calc.at:1362: ok +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1368: cat stderr +stdout: stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1369: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +input: +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86791,31 +80172,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1390: cat stderr -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1387: $PREPARSER ./calc input -stderr: -stderr: -stderr: input: -syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' - | 1 = 2 = 3 -syntax error -error: 2222 != 1 -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: $PREPARSER ./calc input -stderr: + | + | +1 +./calc.at:1368: $PREPARSER ./calc input stderr: -./calc.at:1389: cat stderr -stdout: -1.7: syntax error -./calc.at:1391: "$PERL" -ne ' + | (- *) + (1 2) = 1 +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1369: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -86826,8 +80190,10 @@ || /\t/ )' calc.c -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1375: "$PERL" -pi -e 'use strict; +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1363: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86837,16 +80203,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +./calc.at:1364: cat stderr input: -stderr: - | error -./calc.at:1389: $PREPARSER ./calc input -syntax error +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) error: 2222 != 1 -input: -stderr: -1.7: syntax error +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -86860,92 +80223,128 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1375: cat stderr -./calc.at:1391: $PREPARSER ./calc input -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1369: $PREPARSER ./calc input stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -./types.at:139: ./check -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -syntax error -input: - | (1 + 1) / (1 - 1) -./calc.at:1374: cat stderr -./calc.at:1375: $PREPARSER ./calc input +./calc.at:1364: $PREPARSER ./calc /dev/null +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1363: cat stderr stderr: -1.11-17: error: null divisor stderr: -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (# + 1) = 1111 -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1390: cat stderr +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: input: - | 1 2 -./calc.at:1391: $PREPARSER ./calc input stderr: -1.11-17: error: null divisor -syntax error: invalid character: '#' +./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1367: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +Stack now 0 +./calc.at:1368: cat stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1363: $PREPARSER ./calc input stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) input: -stderr: -stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS - | - | +1 -./calc.at:1390: $PREPARSER ./calc input -1.3: syntax error -syntax error: invalid character: '#' -./calc.at:1375: "$PERL" -pi -e 'use strict; +./calc.at:1368: $PREPARSER ./calc /dev/null +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -86955,90 +80354,5200 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -stdout: input: -2.1: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: cat stderr - | (- *) + (1 2) = 1 -./calc.at:1387: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test -./calc.at:1375: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: + | (* *) + (*) + (*) +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + | 1 2 stderr: -input: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1389: $PREPARSER ./calc input -2.1: syntax error -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 123 -./calc.at:1375: $PREPARSER ./calc --num input stderr: stderr: -======== Testing with C++ standard flags: '' -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -error: 2222 != 1 -./calc.at:1391: cat stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +497. calc.at:1371: testing Calculator parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} api.push-pull=both api.pure=full parse.lac=full ... +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1364: cat stderr +./calc.at:1371: mv calc.y.tmp calc.y + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: -syntax error - | 1//2 -./calc.at:1391: $PREPARSER ./calc input +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1364: $PREPARSER ./calc input stderr: -./calc.at:1390: cat stderr -1.3: syntax error +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: +./calc.at:1367: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', 
expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 
+Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1369: cat stderr +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1363: cat stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Stack now 0 8 21 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Stack now 0 8 21 4 12 21 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Stack now 0 8 21 4 12 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 21 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 21 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 21 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Stack now 0 8 21 4 12 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Stack now 0 8 21 4 12 22 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 21 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Stack now 0 8 21 4 12 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Stack now 0 8 21 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 21 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: +./calc.at:1368: $PREPARSER ./calc input +input: + | 1//2 +stderr: +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1363: $PREPARSER ./calc input +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +stderr: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: +./calc.at:1367: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1371: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 
(line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1364: cat stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +stderr: +input: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) + | 1 + 2 * 3 + !- ++ +input: +./calc.at:1367: $PREPARSER ./calc input +stderr: + | (!!) + (1 2) = 1 +stderr: +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 
88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1368: cat stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token 
"number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1369: cat stderr +input: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +./calc.at:1368: $PREPARSER ./calc input +./calc.at:1367: cat stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | error +stderr: +./calc.at:1369: $PREPARSER ./calc input +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +input: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Stack now 0 4 5 16 +Reducing stack by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 21 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1363: cat stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | 1 + 2 * 3 + !* ++ +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +memory exhausted +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 + | (1 + #) = 1111 +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1363: $PREPARSER ./calc input +memory exhausted +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 
6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1368: cat stderr +./calc.at:1364: cat stderr +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: cat stderr +stderr: +./calc.at:1367: cat stderr +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | (- *) + (1 2) = 1 +input: +./calc.at:1368: $PREPARSER ./calc input + | (- *) + (1 2) = 1 + | (#) + (#) = 2222 +./calc.at:1367: $PREPARSER ./calc input +stderr: +./calc.at:1364: $PREPARSER ./calc input +input: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +stderr: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1369: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting 
token "number" (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +stderr: +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +stderr: +stdout: +stderr: +./calc.at:1363: cat stderr +./types.at:139: $PREPARSER ./test +stderr: +stdout: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Stack now 0 8 21 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 21 4 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 21 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1368: cat stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1370: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1367: cat stderr +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1370: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +input: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1369: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1368: $PREPARSER ./calc input + | (1 + #) = 1111 +stderr: +./calc.at:1367: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack 
now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +444. types.at:139: ok +input: +stderr: +./calc.at:1364: cat stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +syntax error: invalid character: '#' +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: $PREPARSER ./calc input +input: + | + | +1 +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: $PREPARSER ./calc input +stderr: +input: +stderr: +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): 
+ $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (* *) + (*) + (*) +./calc.at:1364: $PREPARSER ./calc input +syntax error: invalid character: '#' +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1368: cat stderr + +./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1363: cat stderr +input: +./calc.at:1367: cat stderr + | 1 2 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 21 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 21 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1370: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1369: cat stderr +./calc.at:1368: $PREPARSER ./calc input +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +input: +input: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: $PREPARSER ./calc /dev/null + | (1 + # + 1) = 1111 + | (# + 1) = 1111 +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1367: $PREPARSER ./calc input +stderr: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' 
(1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +498. calc.at:1374: testing Calculator %start input exp NUM api.value.type=union ... +./calc.at:1374: mv calc.y.tmp calc.y + +stderr: +./calc.at:1374: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +stderr: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +syntax error: invalid character: '#' +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1364: cat stderr +./calc.at:1370: cat stderr +stderr: +stderr: +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +syntax error: invalid character: '#' +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 
+Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | 1 + 2 * 3 + !- ++ +input: +./calc.at:1368: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ + | 1//2 +./calc.at:1364: $PREPARSER ./calc input +stderr: +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1369: cat stderr +stderr: +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1367: cat stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1368: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 21 5 14 +Reducing stack by rule 17 (line 117): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1364: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +input: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +input: +./calc.at:1363: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1367: $PREPARSER ./calc input +./calc.at:1370: cat stderr +stderr: +stderr: + | 1 + 2 * 3 + !* ++ +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1368: $PREPARSER ./calc input +stderr: +stdout: +syntax error: invalid character: '#' +stderr: +./types.at:139: $PREPARSER ./test +input: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1363: $PREPARSER ./calc input +./calc.at:1374: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +1.14: memory exhausted +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1369: cat stderr +./calc.at:1364: $PREPARSER ./calc input + | error +./calc.at:1370: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 
(line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stderr: +1.14: memory exhausted +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +./calc.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 +stderr: +445. 
types.at:139: ok +./calc.at:1369: $PREPARSER ./calc input +stderr: +./calc.at:1368: cat stderr +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1370: cat stderr +input: + | (#) + (#) = 2222 +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp 
(1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1368: $PREPARSER ./calc input +stderr: +./calc.at:1367: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 21 5 13 +Reducing stack by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1363: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +input: +./calc.at:1363: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1367: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | 1 = 2 = 3 +./calc.at:1369: cat stderr +./calc.at:1370: $PREPARSER ./calc input +491. calc.at:1363: ok +stderr: + +error: null divisor +./calc.at:1364: cat stderr +stderr: +./calc.at:1367: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: cat stderr +input: +stderr: + | 1 + 2 * 3 + !* ++ +./calc.at:1364: $PREPARSER ./calc input +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +input: +stderr: +error: null divisor +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: + +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Stack now 0 8 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Stack now 0 8 21 30 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Stack now 0 8 21 30 22 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Stack now 0 8 21 30 22 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Stack now 0 8 21 30 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 21 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Stack now 0 8 21 5 15 +Reducing stack by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Stack now 0 8 21 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | (1 + #) = 1111 +./calc.at:1368: $PREPARSER ./calc input +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1370: cat stderr +./types.at:139: $PREPARSER ./test +./calc.at:1367: cat stderr +1.6: syntax error: invalid character: '#' +stderr: +stderr: +./calc.at:1368: cat stderr +499. calc.at:1375: testing Calculator %start input exp NUM api.value.type=union %locations parse.error=detailed ... +./calc.at:1375: mv calc.y.tmp calc.y + +./calc.at:1364: cat stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +493. calc.at:1367: ./calc.at:1375: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + ok +input: +input: + | + | +1 +./calc.at:1370: $PREPARSER ./calc input + | (# + 1) = 1111 +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1369: cat stderr +500. calc.at:1387: testing Calculator %glr-parser ... +./calc.at:1387: mv calc.y.tmp calc.y + +./calc.at:1368: $PREPARSER ./calc input +stdout: +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | (#) + (#) = 2222 +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1371: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +446. 
types.at:139: ok +stderr: + +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +input: +./calc.at:1371: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.2: syntax error: invalid character: '#' +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: cat stderr + | (* *) + (*) + (*) +./calc.at:1369: $PREPARSER ./calc input +input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Stack now 0 8 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 21 4 +Reading a token +1.8: syntax error: invalid character: '#' +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Next token is token "invalid token" (1.8: ) +Error: discarding token "invalid token" (1.8: ) +Error: popping token error (1.8: ) +Stack now 0 8 21 4 +Shifting token error (1.8: ) +Entering state 11 +Stack now 0 8 21 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Stack now 0 8 21 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Stack now 0 8 21 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1370: $PREPARSER ./calc /dev/null +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +./calc.at:1368: cat stderr +501. calc.at:1389: testing Calculator %glr-parser %header ... + +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1389: mv calc.y.tmp calc.y + +input: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | (1 + # + 1) = 1111 +./calc.at:1368: $PREPARSER ./calc input +stderr: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1389: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1387: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +./calc.at:1364: cat stderr +./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.6: syntax error: invalid character: '#' +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: cat stderr +./calc.at:1375: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1370: cat stderr +stderr: +1.6: syntax error: invalid character: '#' +input: +input: +502. calc.at:1390: testing Calculator %glr-parser %locations ... 
+ | (1 + #) = 1111 + | 1 2 +./calc.at:1390: mv calc.y.tmp calc.y + +./calc.at:1364: $PREPARSER ./calc input +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1369: $PREPARSER ./calc input +input: +./calc.at:1390: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1371: $PREPARSER ./calc input +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +./calc.at:1370: $PREPARSER ./calc input +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1368: cat stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1369: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +input: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (1 + 1) / (1 - 1) +./calc.at:1368: $PREPARSER ./calc input +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +stderr: +input: +1.11-17: error: null divisor +./calc.at:1368: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1389: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +stderr: +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1370: cat stderr +./calc.at:1390: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +./calc.at:1364: cat stderr +./calc.at:1374: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +1.11-17: error: null divisor +input: + | 1//2 +./calc.at:1371: $PREPARSER ./calc input +stderr: +input: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1374: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1370: $PREPARSER ./calc input +stderr: +input: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1368: cat stderr + | (# + 1) = 1111 +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1369: cat stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1374: $PREPARSER ./calc input +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +494. 
calc.at:1368: ok +stderr: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +input: +stderr: +stderr: +./calc.at:1371: cat stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.2: ) +Error: discarding token "invalid token" (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 + !* ++ +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1370: cat stderr +stderr: +stderr: +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.14: memory exhausted +input: + +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1364: cat stderr +./calc.at:1371: $PREPARSER ./calc input +stderr: +input: +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | (- *) + (1 2) = 1 +1.14: memory exhausted +input: +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1 2 +./calc.at:1374: $PREPARSER ./calc input +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +input: +stderr: +syntax error +stderr: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1364: $PREPARSER ./calc input +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1369: cat stderr +stderr: +stderr: +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) 
+Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +503. calc.at:1391: testing Calculator %glr-parser %locations api.location.type={Span} ... +input: +./calc.at:1391: mv calc.y.tmp calc.y + + | (#) + (#) = 2222 +./calc.at:1371: cat stderr +./calc.at:1391: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1370: cat stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +1.6: syntax error: invalid character: '#' +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token "invalid token" (1.6: ) +Error: discarding token "invalid token" (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Stack now 0 4 11 26 +Reducing stack by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 
+Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Stack now 0 8 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Stack now 0 8 19 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Stack now 0 8 19 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) +./calc.at:1370: $PREPARSER ./calc input + | 1 = 2 = 3 +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1371: $PREPARSER ./calc input +stderr: +./calc.at:1374: cat stderr +stderr: +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1364: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +input: +./calc.at:1369: cat stderr +./calc.at:1364: cat stderr + | 1//2 +./calc.at:1374: $PREPARSER ./calc input +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +./calc.at:1391: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +syntax error +./calc.at:1370: cat stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | (1 + #) = 1111 +stderr: +syntax error +./calc.at:1369: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1364: $PREPARSER ./calc input +./calc.at:1371: cat stderr +stderr: +stderr: +1.6: syntax error: invalid character: '#' +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) 
+Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1374: $PREPARSER ./calc input -./calc.at:1387: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !+ ++ + | + | +1 +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1370: $PREPARSER ./calc input +stderr: +./calc.at:1364: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87048,24 +85557,158 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1390: $PREPARSER ./calc /dev/null +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Stack now 0 4 12 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Stack now 0 4 12 21 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Stack now 0 4 12 21 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) 
+Shifting token ')' (1.7: ) +Entering state 27 +Stack now 0 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Stack now 0 8 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 23 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Stack now 0 8 23 4 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 23 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Stack now 0 8 23 4 12 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Stack now 0 8 23 4 12 20 1 +Reducing stack by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Stack now 0 8 23 4 12 20 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 23 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Stack now 0 8 23 4 12 27 +Reducing stack by rule 13 (line 113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Stack now 0 8 23 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Stack now 0 8 25 +Reducing stack by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Stack now 0 6 17 +Stack now 0 6 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: +./calc.at:1370: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -1.1: syntax error -syntax error: invalid character: '#' -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: cat stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1374: cat stderr input: -stderr: - | 1 + 2 * 3 -stderr: -./calc.at:1375: $PREPARSER ./calc --num input -1.3: syntax error -syntax error: invalid character: '#' -stderr: -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1364: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87075,8 +85718,74 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error, unexpected '+', expecting end of file -./calc.at:1391: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !- ++ +./calc.at:1370: $PREPARSER ./calc input +stderr: +input: +./calc.at:1371: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: $PREPARSER ./calc input +./calc.at:1364: cat stderr +./calc.at:1371: $PREPARSER ./calc /dev/null +stderr: + | error +./calc.at:1374: $PREPARSER ./calc input +stderr: +stderr: +stderr: +stdout: +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.2: syntax error: invalid character: '#' +syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: $EGREP '(malloc|free) *\(' calc.[ch] | $EGREP -v 'INFRINGES ON USER NAME SPACE' +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +492. calc.at:1364: ok +stderr: +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1375: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +./calc.at:1370: cat stderr +stderr: +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.2: syntax error: invalid character: '#' +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +input: +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1371: cat stderr + + | 1 + 2 * 3 + !* ++ +stderr: +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1369: cat stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87086,7 +85795,60 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +1.14: memory exhausted +./calc.at:1371: $PREPARSER ./calc input +input: +stderr: +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 + | (1 + # + 1) = 1111 +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: $PREPARSER ./calc input +stderr: +./calc.at:1374: cat stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.6: syntax error: invalid character: '#' +stderr: + | 1 2 +./calc.at:1375: $PREPARSER ./calc input +stderr: +1.14: memory exhausted +stderr: +1.3: syntax error, unexpected number +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +input: + | 1 = 2 = 3 +./calc.at:1374: $PREPARSER ./calc input +stderr: +./calc.at:1370: cat stderr +1.3: syntax error, unexpected number +stderr: +504. calc.at:1392: testing Calculator %glr-parser %name-prefix "calc" ... 
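The recurring "$PERL" -pi ... expout commands rewrite the expected-output file in place: each line of the form "syntax error on token [X] (expected: [A] [B] ...)" becomes "syntax error, unexpected X", with the expected tokens appended only when there are between two and four of them. Run by hand on a hypothetical one-line expout (the sample line is an assumption, perl stands in for $PERL, and the substitution itself is copied from the log), the effect looks like this:

    printf "%s\n" "1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!'])" > expout
    perl -pi -e 'use strict;
      s{syntax error on token \[(.*?)\] \(expected: (.*)\)}
      {
        my $unexp = $1;
        my @exps = $2 =~ /\[(.*?)\]/g;
        ($#exps && $#exps < 4)
          ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}"
          : "syntax error, unexpected $unexp";
      }eg' expout
    cat expout
    # -> 1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!'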
+./calc.at:1392: mv calc.y.tmp calc.y + +./calc.at:1392: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1371: cat stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87096,11 +85858,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: cat stderr stderr: -./calc.at:1390: cat stderr input: +syntax error + | (#) + (#) = 2222 +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1369: cat stderr +input: +./calc.at:1375: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | (!!) + (1 2) = 1 +./calc.at:1371: $PREPARSER ./calc input +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +input: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1369: $PREPARSER ./calc input ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87111,19 +85892,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error, unexpected '+', expecting end of file - | (* *) + (*) + (*) -./calc.at:1387: $PREPARSER ./calc input -stderr: -./calc.at:1389: cat stderr -./calc.at:1391: cat stderr input: -syntax error -syntax error -syntax error -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1390: $PREPARSER ./calc input + | 1//2 +./calc.at:1392: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +./calc.at:1375: $PREPARSER ./calc input +1.11-17: error: null divisor +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1369: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1370: cat stderr +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 ./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87135,48 +85919,52 @@ }eg ' expout || exit 77 stderr: +./calc.at:1374: cat stderr +1.11-17: error: null divisor +./calc.at:1375: cat stderr input: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1370: $PREPARSER ./calc input +input: +./calc.at:1371: cat stderr +./calc.at:1369: cat stderr input: | | +1 -./calc.at:1389: $PREPARSER ./calc input +./calc.at:1374: $PREPARSER ./calc input stderr: | error -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1374: cat stderr +./calc.at:1375: $PREPARSER ./calc input +495. 
calc.at:1369: ok +stderr: +input: +1.6: syntax error: invalid character: '#' stderr: syntax error -syntax error -syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1371: $PREPARSER ./calc input +1.1: syntax error, unexpected invalid token +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 stderr: -./calc.at:1375: cat stderr +1.6: syntax error: invalid character: '#' stderr: -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -1.1: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./types.at:139: ./check - | (1 + 1) / (1 - 1) +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 stderr: -./calc.at:1374: $PREPARSER ./calc input -input: syntax error -./types.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o test.cc test.y -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error, unexpected invalid token +stderr: +./calc.at:1370: cat stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 + +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87186,15 +85974,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: - | 1 + 2 * 3 -./calc.at:1375: $PREPARSER ./calc --exp input -error: null divisor -1.1: syntax error -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1387: "$PERL" -pi -e 'use strict; +input: + | (# + 1) = 1111 +./calc.at:1371: cat stderr +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87204,34 +85988,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1375: cat stderr stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: + | 1 = 2 = 3 stderr: -stdout: -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -error: null divisor +./calc.at:1375: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1371: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +stderr: +stderr: +./calc.at:1374: cat stderr +1.7: syntax error, unexpected '=' ./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: cat stderr -./calc.at:1392: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. 
== 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +./calc.at:1374: $PREPARSER ./calc /dev/null +1.7: syntax error, unexpected '=' +505. calc.at:1393: testing Calculator %glr-parser api.prefix={calc} ... +./calc.at:1370: cat stderr +stderr: +./calc.at:1393: mv calc.y.tmp calc.y -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87241,30 +86030,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: + | (1 + # + 1) = 1111 +stderr: +./calc.at:1370: $PREPARSER ./calc input +stderr: +./calc.at:1375: cat stderr +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +1.6: syntax error: invalid character: '#' +syntax error +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: cat stderr input: -./calc.at:1387: cat stderr -./calc.at:1391: cat stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1392: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1390: $PREPARSER ./calc input + | +1 +./calc.at:1375: $PREPARSER ./calc input stderr: stderr: +2.1: syntax error, unexpected '+' +1.6: syntax error: invalid character: '#' +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 + !+ ++ ./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87275,37 +86065,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -499. 
calc.at:1375: ok -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: cat stderr -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1389: $PREPARSER ./calc /dev/null -stderr: -stderr: - | 1 = 2 = 3 -./calc.at:1391: $PREPARSER ./calc input -stderr: +./calc.at:1371: $PREPARSER ./calc input stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 stderr: -./types.at:139: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -syntax error -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1370: cat stderr +2.1: syntax error, unexpected '+' +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: ./calc.at:1374: cat stderr -1.7: syntax error - | 1 2 -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; + | (1 + 1) / (1 - 1) +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87315,28 +86084,52 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1370: $PREPARSER ./calc input +./calc.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS stderr: stderr: - +./calc.at:1371: $EGREP -c -v 'Return for a new token:|LAC:' stderr +1.11-17: error: null divisor +input: +./calc.at:1375: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1370: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +input: +1.11-17: error: null divisor syntax error syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 4444 != 1 +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: - | 123 -./calc.at:1374: $PREPARSER ./calc --num input -1.7: syntax error +./calc.at:1375: $PREPARSER ./calc /dev/null + | 1 + 2 * 3 + !- ++ +./calc.at:1371: $PREPARSER ./calc input stderr: +stdout: +1.1: syntax error, unexpected end of file stderr: +stderr: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./types.at:139: $PREPARSER ./test syntax error -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1390: cat stderr +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1370: cat stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1391: "$PERL" -pi -e 'use strict; +stderr: +1.1: syntax error, unexpected end of file +496. 
calc.at:1370: ok +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87346,8 +86139,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1375: cat stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87357,12 +86150,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: - | (- *) + (1 2) = 1 -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1392: "$PERL" -pi -e 'use strict; +452. types.at:139: ok + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1371: cat stderr +stderr: +./calc.at:1374: cat stderr + +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | 1 + 2 * 3 + !* ++ +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1371: $PREPARSER ./calc input +input: +stderr: + + | (!!) + (1 2) = 1 +./calc.at:1374: $PREPARSER ./calc input +1.14: memory exhausted +stderr: +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87372,29 +86190,51 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error +error: 2222 != 1 +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: cat stderr stderr: stderr: +syntax error +error: 2222 != 1 +1.14: memory exhausted +input: 506. calc.at:1394: testing Calculator %glr-parser %verbose ... ./calc.at:1394: mv calc.y.tmp calc.y -input: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (!!) + (1 2) = 1 +./calc.at:1375: $PREPARSER ./calc input ./calc.at:1394: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y - | 1 + 2 * 3 -./calc.at:1374: $PREPARSER ./calc --num input -./calc.at:1391: cat stderr stderr: +./calc.at:1371: cat stderr +507. calc.at:1395: testing Calculator %glr-parser parse.error=verbose ... +./calc.at:1395: mv calc.y.tmp calc.y + +stdout: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + +./calc.at:1395: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1389: cat stderr -syntax error -./calc.at:1392: cat stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; +stderr: +stdout: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87404,12 +86244,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1389: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + input: -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 | - | +1 -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1390: "$PERL" -pi -e 'use strict; + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 + | (#) + (#) = 2222 +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1371: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87420,79 +86286,120 @@ }eg ' expout || exit 77 stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stdout: input: -input: -2.1: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1374: cat stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1389: $PREPARSER ./calc input -syntax error - | 1//2 -./calc.at:1392: $PREPARSER ./calc input +./calc.at:1375: cat stderr stderr: stderr: -./calc.at:1387: cat stderr -2.1: syntax error +stderr: +input: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | (- *) + (1 2) = 1 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1374: $PREPARSER ./calc input +stderr: +input: +./calc.at:1390: $PREPARSER ./calc input +input: +stderr: + | (- *) + (1 2) = 1 +./calc.at:1395: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | 1 2 +./calc.at:1375: $PREPARSER ./calc input +input: +./calc.at:1387: $PREPARSER ./calc input stderr: syntax error syntax error +error: 2222 != 1 + | 1 2 +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: cat stderr +stderr: +./calc.at:1389: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -error: 4444 != 1 ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: cat stderr -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: stderr: -./calc.at:1374: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error -./calc.at:1391: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !* ++ -input: -./calc.at:1387: $PREPARSER ./calc input ./calc.at:1394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +stderr: +input: +stderr: syntax error syntax error syntax error +error: 2222 != 1 syntax error -error: 4444 != 1 + | (1 + #) = 1111 stderr: - | (* *) + (*) + (*) +./calc.at:1371: $PREPARSER ./calc input +input: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +stderr: + | 1 2 ./calc.at:1390: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' stderr: -memory exhausted -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1374: cat stderr -stderr: -stderr: -./calc.at:1391: cat stderr -memory exhausted -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87502,11 +86409,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87516,14 +86419,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 -./calc.at:1391: $PREPARSER ./calc /dev/null -./calc.at:1374: $PREPARSER ./calc --exp input -stderr: -stderr: -1.1: syntax error -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87533,7 +86429,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87545,18 +86440,15 @@ }eg ' expout || exit 77 stderr: -./calc.at:1392: cat stderr -./calc.at:1389: cat stderr -1.1: syntax error +1.3: syntax error stderr: -./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -./calc.at:1390: cat stderr - | error -./calc.at:1392: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1389: cat stderr +./calc.at:1375: cat stderr +./calc.at:1374: cat stderr +./calc.at:1371: cat stderr ./calc.at:1387: cat stderr -input: -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87566,42 +86458,65 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -498. calc.at:1374: ok input: -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1389: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1390: $PREPARSER ./calc input input: +input: +input: + | 1//2 + | 1//2 + | (* *) + (*) + (*) +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1375: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1371: $PREPARSER ./calc input stderr: stderr: +stderr: +input: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+stderr: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -error: 2222 != 1 +syntax error + | (* *) + (*) + (*) +./calc.at:1374: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +./calc.at:1390: cat stderr ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1391: cat stderr stderr: -syntax error -syntax error: invalid character: '#' -syntax error: invalid character: '#' +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -error: 2222 != 1 - +syntax error +syntax error +syntax error +syntax error +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' stderr: stderr: +1.2: syntax error: invalid character: '#' input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -input: -./calc.at:1392: "$PERL" -pi -e 'use strict; +stderr: +stdout: + | 1//2 +./calc.at:1390: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test +syntax error +syntax error +syntax error +stderr: +1.3: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87611,10 +86526,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1391: $PREPARSER ./calc input -./calc.at:1390: $PREPARSER ./calc input +stderr: +stderr: ./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87625,17 +86538,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +stdout: ./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87646,14 +86549,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1392: cat stderr +./calc.at:1371: cat stderr +./types.at:139: $PREPARSER ./test +input: +stderr: +stderr: + | (1 + # + 1) = 1111 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: cat stderr +1.3: syntax error +./calc.at:1371: $PREPARSER ./calc input ./calc.at:1389: cat stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; +stderr: +450. 
types.at:139: ok +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87663,7 +86573,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: "$PERL" -pi -e 'use strict; +input: +1.6: syntax error: invalid character: '#' +input: +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87673,54 +86586,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -507. calc.at:1395: testing Calculator %glr-parser parse.error=verbose ... -./calc.at:1395: mv calc.y.tmp calc.y - -input: -input: ./calc.at:1387: cat stderr - | 1 = 2 = 3 - | (- *) + (1 2) = 1 -./calc.at:1392: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1375: $PREPARSER ./calc input + | error ./calc.at:1389: $PREPARSER ./calc input -./calc.at:1395: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: +447. types.at:139: ok +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error -syntax error -error: 2222 != 1 ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: cat stderr -./calc.at:1391: cat stderr +stderr: input: - | (1 + #) = 1111 + | error +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1387: $PREPARSER ./calc input stderr: +1.6: syntax error: invalid character: '#' + stderr: stderr: syntax error + +./calc.at:1390: cat stderr +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +./calc.at:1374: cat stderr syntax error -error: 2222 != 1 -syntax error: invalid character: '#' ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error input: - | 1 + 2 * 3 + !* ++ +stderr: +./calc.at:1371: cat stderr + | error ./calc.at:1390: $PREPARSER ./calc input input: -stderr: - | (!!) + (1 2) = 1 -stderr: -./calc.at:1391: $PREPARSER ./calc input -syntax error: invalid character: '#' -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -1.14: memory exhausted -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +syntax error +./calc.at:1375: $PREPARSER ./calc input ./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87731,18 +86633,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1392: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: +stderr: +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1374: $PREPARSER ./calc input +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +input: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1371: $PREPARSER ./calc input +stderr: stderr: stderr: +stderr: +1.1: syntax error +1.11-17: error: null divisor +508. calc.at:1397: testing Calculator %glr-parser api.pure %locations ... ./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87753,14 +86663,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.11: syntax error -1.1-16: error: 2222 != 1 -1.14: memory exhausted -./calc.at:1395: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1371: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: mv calc.y.tmp calc.y + ./calc.at:1389: cat stderr -./calc.at:1392: cat stderr -./calc.at:1387: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87770,6 +86679,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1397: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +1.11-17: error: null divisor +./calc.at:1375: cat stderr ./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87780,31 +86692,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +509. calc.at:1398: testing Calculator %glr-parser parse.error=verbose %locations ... +./calc.at:1398: mv calc.y.tmp calc.y + +./calc.at:1387: cat stderr input: -input: - | - | +1 - | (* *) + (*) + (*) +./calc.at:1398: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | 1 = 2 = 3 ./calc.at:1389: $PREPARSER ./calc input -./calc.at:1392: $PREPARSER ./calc input -stderr: +input: +./calc.at:1371: cat stderr input: stderr: -syntax error -syntax error + | 1 + 2 * 3 + !* ++ +./calc.at:1375: $PREPARSER ./calc input syntax error ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (# + 1) = 1111 + | 1 + 2 * 3 + !- ++ +./calc.at:1374: $PREPARSER ./calc input +input: stdout: -./calc.at:1387: $PREPARSER ./calc input -syntax error -./calc.at:1391: cat stderr +497. calc.at:1371: ok stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./calc.at:1390: cat stderr -./calc.at:1393: "$PERL" -ne ' +1.14: memory exhausted +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1387: $PREPARSER ./calc input +syntax error +stderr: +./calc.at:1391: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. 
@@ -87815,21 +86734,16 @@ || /\t/ )' calc.c +stderr: +stderr: +1.14: memory exhausted +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -syntax error -syntax error -syntax error: invalid character: '#' ./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: input: -input: -stderr: - | (- *) + (1 2) = 1 + | 1 = 2 = 3 stderr: - | (#) + (#) = 2222 -syntax error -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1391: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -87843,21 +86757,71 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1393: $PREPARSER ./calc input +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -syntax error: invalid character: '#' stderr: + stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +./calc.at:1375: cat stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error +syntax error +./calc.at:1397: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +stderr: +./calc.at:1389: cat stderr +input: +stderr: +1.7: syntax error + | 1 2 +./calc.at:1391: $PREPARSER ./calc input +input: +stderr: +input: +./calc.at:1398: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | (#) + (#) = 2222 +1.3: syntax error ./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: $PREPARSER ./calc input + | + | +1 +./calc.at:1389: $PREPARSER ./calc input stderr: -./calc.at:1389: "$PERL" -pi -e 'use strict; +stderr: +syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87867,13 +86831,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87883,7 +86845,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1387: "$PERL" -pi -e 'use strict; +1.3: syntax error +510. calc.at:1400: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... +./calc.at:1400: mv calc.y.tmp calc.y + +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1387: cat stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87893,8 +86864,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1390: cat stderr +./calc.at:1374: cat stderr +syntax error +./calc.at:1400: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1391: cat stderr +input: +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87904,8 +86880,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 + 2 * 3 + !* ++ +./calc.at:1374: $PREPARSER ./calc input input: -./calc.at:1391: "$PERL" -pi -e 'use strict; +input: +input: +stderr: + | + | +1 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1375: cat stderr +memory exhausted +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1//2 + | + | +1 +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1391: $PREPARSER ./calc input +2.1: syntax error +stderr: +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87915,46 +86910,103 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr - | 1 2 -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1392: cat stderr +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1387: cat stderr syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: $PREPARSER ./calc /dev/null +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1390: cat stderr -./calc.at:1391: cat stderr stderr: +1.3: syntax error +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1 + 2 * 3 + !+ ++ -input: -./calc.at:1389: $PREPARSER ./calc input -syntax error - | (1 + # + 1) = 1111 + | (1 + #) = 1111 +./calc.at:1375: $PREPARSER ./calc input +memory exhausted +2.1: syntax error +stderr: +stderr: +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr syntax error -./calc.at:1387: $PREPARSER ./calc input -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: cat stderr +1.3: syntax error +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: $PREPARSER ./calc /dev/null stderr: -input: +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: - | (1 + #) = 1111 +1.6: syntax error: invalid character: '#' +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: $PREPARSER ./calc input -input: +./calc.at:1390: cat stderr +stderr: +./calc.at:1400: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +./calc.at:1390: $PREPARSER ./calc /dev/null +./calc.at:1391: cat stderr +./calc.at:1374: cat stderr stderr: stderr: -syntax error: invalid character: '#' -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' +./calc.at:1387: cat stderr +./calc.at:1375: cat stderr +stdout: +1.1: syntax error ./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -syntax error -./calc.at:1391: $PREPARSER ./calc input +./types.at:139: $PREPARSER ./test +./calc.at:1387: $PREPARSER ./calc /dev/null +input: +input: stderr: -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87965,18 +87017,46 @@ }eg ' expout || exit 77 stderr: + | error +./calc.at:1391: $PREPARSER ./calc input + | (#) + (#) = 2222 stderr: input: -1.6: syntax error: invalid character: '#' -1.2: syntax error -1.10: syntax error -1.16: syntax error +./calc.at:1374: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1375: $PREPARSER ./calc input stderr: +stdout: +syntax error +stderr: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error ./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +stderr: syntax error: invalid character: '#' - | 1 + 2 * 3 + !- ++ -./calc.at:1389: $PREPARSER ./calc input +syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1389: cat stderr ./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -87987,7 +87067,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1392: "$PERL" -pi -e 'use strict; +syntax error +448. 
types.at:139: ok +stderr: +1.1: syntax error +input: +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +1.2: syntax error: invalid character: '#' +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -87998,18 +87099,8 @@ }eg ' expout || exit 77 stderr: -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1393: cat stderr -stderr: input: - | 1//2 -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1390: cat stderr -./calc.at:1392: cat stderr -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88019,7 +87110,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88029,19 +87120,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | (# + 1) = 1111 -./calc.at:1390: $PREPARSER ./calc input -./calc.at:1392: $PREPARSER ./calc input -stderr: -./calc.at:1387: cat stderr -./calc.at:1391: cat stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88051,34 +87133,88 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error + stderr: +./calc.at:1390: cat stderr +./calc.at:1391: cat stderr stderr: -1.2: syntax error: invalid character: '#' syntax error syntax error syntax error syntax error error: 4444 != 1 -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: cat stderr +./calc.at:1375: cat stderr +./calc.at:1374: cat stderr input: stderr: input: +input: +input: + | 1 2 syntax error syntax error syntax error syntax error error: 4444 != 1 - | (1 + 1) / (1 - 1) - | 1 + 2 * 3 + !+ ++ -./calc.at:1387: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' + | (1 + # + 1) = 1111 +./calc.at:1375: $PREPARSER ./calc input +./calc.at:1392: $PREPARSER ./calc input +stderr: +input: +1.6: syntax error: invalid character: '#' + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1374: $PREPARSER ./calc input +input: + | 1 = 2 = 3 ./calc.at:1391: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +./calc.at:1387: $PREPARSER ./calc input stderr: stderr: -./calc.at:1393: "$PERL" -pi -e 'use strict; +stderr: +stderr: +1.6: syntax error: invalid character: '#' +stderr: +syntax error +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax 
error +1.1-46: error: 4444 != 1 +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +1.7: syntax error +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +syntax error +stderr: +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88088,11 +87224,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: cat stderr -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: null divisor -./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1390: "$PERL" -pi -e 'use strict; +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88102,8 +87234,48 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.7: syntax error +511. calc.at:1401: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose api.pure ... +./calc.at:1401: mv calc.y.tmp calc.y + stderr: +syntax error: invalid character: '#' +./calc.at:1375: cat stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1401: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: + | (1 + 1) / (1 - 1) +./calc.at:1389: cat stderr +./calc.at:1375: $PREPARSER ./calc input ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88115,24 +87287,45 @@ }eg ' expout || exit 77 stderr: - | 1 + 2 * 3 + !* ++ +1.11-17: error: null divisor +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr +input: +stderr: +./calc.at:1387: cat stderr +./calc.at:1390: cat stderr + | (!!) 
+ (1 2) = 1 +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1389: $PREPARSER ./calc input -error: null divisor input: +1.11-17: error: null divisor stderr: - | 1 + 2 * 3 + !- ++ + | + | +1 ./calc.at:1391: $PREPARSER ./calc input -memory exhausted +./calc.at:1392: cat stderr +syntax error +error: 2222 != 1 ./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1393: cat stderr -./calc.at:1390: cat stderr +input: +2.1: syntax error ./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1392: cat stderr -memory exhausted input: -./calc.at:1387: "$PERL" -pi -e 'use strict; +./calc.at:1374: cat stderr +stderr: + | (!!) + (1 2) = 1 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1375: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88142,21 +87335,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error + | (!!) + (1 2) = 1 +./calc.at:1387: $PREPARSER ./calc input +syntax error +error: 2222 != 1 input: stderr: -./calc.at:1393: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1390: $PREPARSER ./calc input stderr: stderr: +./calc.at:1375: cat stderr +2.1: syntax error +1.11: syntax error +1.1-16: error: 2222 != 1 + | 1//2 +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: $PREPARSER ./calc input +syntax error +error: 2222 != 1 +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +stderr: +stderr: +stderr: +./calc.at:1401: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS syntax error - | (!!) + (1 2) = 1 -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: 2222 != 1 + | (# + 1) = 1111 +syntax error +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 +input: +stderr: + | 123 stderr: ./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -88168,10 +87380,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1375: $PREPARSER ./calc --num input +syntax error: invalid character: '#' syntax error -error: 2222 != 1 -./calc.at:1387: cat stderr -stderr: ./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88182,11 +87393,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -stderr: -syntax error -500. 
calc.at:1387: ok +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -88198,11 +87405,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -error: 2222 != 1 +stderr: +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +stderr: ./calc.at:1389: cat stderr ./calc.at:1391: cat stderr -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88212,11 +87421,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (#) + (#) = 2222 -./calc.at:1389: $PREPARSER ./calc input -stderr: -./calc.at:1390: cat stderr ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88227,44 +87431,81 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1390: cat stderr input: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1391: $PREPARSER ./calc input - +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1391: $PREPARSER ./calc /dev/null stderr: + | (- *) + (1 2) = 1 +./calc.at:1392: cat stderr +./calc.at:1389: $PREPARSER ./calc input input: -./calc.at:1393: cat stderr +1.1: syntax error +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (1 + 1) / (1 - 1) +./calc.at:1387: cat stderr + | 1 + 2 * 3 +input: +./calc.at:1375: $PREPARSER ./calc --num input +syntax error +syntax error +error: 2222 != 1 +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 ./calc.at:1390: $PREPARSER ./calc input -./calc.at:1392: cat stderr -1.14: memory exhausted -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11-17: error: null divisor -./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: +1.3: syntax error, unexpected '+', expecting end of file +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 stderr: +1.1: syntax error +./calc.at:1374: cat stderr + | error +./calc.at:1392: $PREPARSER ./calc input input: -1.14: memory exhausted +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 + | (- *) + (1 2) = 1 input: +./calc.at:1387: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1374: $PREPARSER ./calc input stderr: - | 1 = 2 = 3 - | (- *) + (1 2) = 1 -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1393: $PREPARSER ./calc input -1.11-17: error: null divisor +syntax error: invalid character: '#' +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 syntax error syntax error error: 2222 != 1 stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +stderr: +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88274,9 +87515,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +1.3: syntax error, unexpected '+', expecting end of file +stderr: +stderr: +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88286,7 +87528,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +syntax error +syntax error +error: 2222 != 1 +syntax error ./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88297,23 +87542,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -syntax error -error: 2222 != 1 stderr: -508. calc.at:1397: testing Calculator %glr-parser api.pure %locations ... -./calc.at:1397: mv calc.y.tmp calc.y - -syntax error -./calc.at:1397: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stdout: ./calc.at:1389: cat stderr +./calc.at:1395: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + ./calc.at:1391: cat stderr +./calc.at:1375: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1390: cat stderr -input: -502. 
calc.at:1390: ok - | (1 + #) = 1111 -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1374: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88324,8 +87589,9 @@ }eg ' expout || exit 77 input: -stderr: -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1375: cat stderr +input: +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88335,35 +87601,131 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (#) + (#) = 2222 +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1395: $PREPARSER ./calc input +stderr: + | (* *) + (*) + (*) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: ./calc.at:1391: $PREPARSER ./calc input -syntax error: invalid character: '#' -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: cat stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1389: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1390: $PREPARSER ./calc input + | 1 + 2 * 3 +stdout: +./calc.at:1375: $PREPARSER ./calc --exp input +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1394: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.c + | (* *) + (*) + (*) +./calc.at:1387: $PREPARSER ./calc input +stderr: +stderr: +syntax error +syntax error +syntax error +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +./calc.at:1374: cat stderr +syntax error +syntax error +syntax error +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1392: cat stderr -syntax error: invalid character: '#' +./calc.at:1375: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1393: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: - | (* *) + (*) + (*) -./calc.at:1392: $PREPARSER ./calc input input: +syntax error +syntax error +syntax error stderr: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 | - | +1 -./calc.at:1393: $PREPARSER ./calc input + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 syntax error syntax error syntax error -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1394: $PREPARSER ./calc input + | 1 2 +stderr: +./calc.at:1395: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +input: +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +stderr: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1374: $PREPARSER ./calc input +syntax error, unexpected number +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1375: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +input: +stderr: + | 1 = 2 = 3 +error: null divisor +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: $PREPARSER ./calc input +syntax error, unexpected number ./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88374,8 +87736,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./calc.at:1389: "$PERL" -pi -e 'use strict; +stderr: +stderr: +stderr: +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88385,19 +87749,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +499. calc.at:1375: ok +error: null divisor syntax error +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -./calc.at:1389: cat stderr -509. calc.at:1398: testing Calculator %glr-parser parse.error=verbose %locations ... 
-./calc.at:1398: mv calc.y.tmp calc.y - -./calc.at:1398: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88407,12 +87774,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1391: cat stderr -stdout: input: -./calc.at:1397: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1394: "$PERL" -ne ' + | 1 2 +stdout: +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1393: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -88423,12 +87789,15 @@ || /\t/ )' calc.c - | (# + 1) = 1111 -./calc.at:1389: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: "$PERL" -pi -e 'use strict; +syntax error +stderr: +./calc.at:1387: cat stderr +syntax error +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr +./calc.at:1389: cat stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88438,11 +87807,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1392: cat stderr input: -input: - | (1 + #) = 1111 -./calc.at:1391: $PREPARSER ./calc input +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -88456,26 +87833,26 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1394: $PREPARSER ./calc input -stderr: -stderr: + +input: +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1390: cat stderr + | 1 + 2 * 3 + !+ ++ stderr: +./calc.at:1387: $PREPARSER ./calc input input: -1.6: syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1393: cat stderr -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: +./calc.at:1395: cat stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) 
+ (1 2) = 1 +./calc.at:1391: $PREPARSER ./calc input | 1 + 2 * 3 + !+ ++ -./calc.at:1392: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: $PREPARSER ./calc input stderr: -./calc.at:1393: $PREPARSER ./calc /dev/null stderr: -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88485,7 +87862,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1389: "$PERL" -pi -e 'use strict; +1.11: syntax error +1.1-16: error: 2222 != 1 +input: +stderr: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1374: cat stderr +input: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +input: +./calc.at:1393: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88495,34 +87885,51 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error +./calc.at:1390: $PREPARSER ./calc input + | 1//2 input: -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: $PREPARSER ./calc input stderr: - | 1 2 -./calc.at:1394: $PREPARSER ./calc input input: stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1392: $PREPARSER ./calc input -syntax error stderr: stderr: -./calc.at:1391: cat stderr -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: cat stderr -./calc.at:1398: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS + | 123 +./calc.at:1374: $PREPARSER ./calc --num input +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1392: cat stderr +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 stderr: stderr: input: - | (# + 1) = 1111 -./calc.at:1391: $PREPARSER ./calc input +stderr: +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error + | 1 + 2 * 3 + !- ++ + | + | +1 +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+stderr: +./calc.at:1389: $PREPARSER ./calc input +stderr: +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: input: -./calc.at:1393: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88532,15 +87939,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' - | (1 + # + 1) = 1111 -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr +syntax error +./calc.at:1394: cat stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +512. calc.at:1402: testing Calculator %glr-parser parse.error=detailed %locations %header %name-prefix "calc" %verbose ... +./calc.at:1402: mv calc.y.tmp calc.y + + | 1 + 2 * 3 + !- ++ +./calc.at:1390: $PREPARSER ./calc input +input: stderr: -./calc.at:1392: "$PERL" -pi -e 'use strict; +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88550,11 +87961,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error: invalid character: '#' +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -./calc.at:1393: cat stderr -syntax error: invalid character: '#' -./calc.at:1394: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 +./calc.at:1374: $PREPARSER ./calc --num input +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +syntax error +stderr: + | 1//2 +syntax error +./calc.at:1395: cat stderr +./calc.at:1391: cat stderr +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88564,8 +87993,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1392: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88575,9 +88004,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +syntax error +stderr: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: cat stderr input: input: -./calc.at:1389: "$PERL" -pi -e 'use strict; + | error +./calc.at:1395: $PREPARSER ./calc input +stderr: +syntax error, unexpected invalid token + | (- *) + (1 2) = 1 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1389: cat stderr +stderr: +stderr: +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: 
(.*)\)} { my $unexp = $1; @@ -88587,49 +88032,100 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1394: cat stderr -./calc.at:1393: $PREPARSER ./calc input - | 1 + 2 * 3 + !* ++ -./calc.at:1392: $PREPARSER ./calc input stderr: +./calc.at:1387: cat stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 syntax error +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -memory exhausted -./calc.at:1391: cat stderr -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: | 1//2 -stderr: -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1389: cat stderr +./calc.at:1393: $PREPARSER ./calc input +syntax error, unexpected invalid token + | 1 + 2 * 3 + !* ++ +./calc.at:1389: $PREPARSER ./calc input stderr: input: +./calc.at:1392: cat stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +stderr: +memory exhausted +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !* ++ syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: $PREPARSER ./calc /dev/null +./calc.at:1387: $PREPARSER ./calc input +stderr: stderr: - | (1 + # + 1) = 1111 -./calc.at:1391: $PREPARSER ./calc input memory exhausted -input: +memory exhausted stderr: - | (1 + 1) / (1 - 1) -1.6: syntax error: invalid character: '#' -./calc.at:1389: $PREPARSER ./calc input -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1390: cat stderr +syntax error +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88639,11 +88135,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: syntax error -error: null divisor -./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: cat stderr +memory exhausted +./calc.at:1402: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +input: +./calc.at:1395: cat stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1391: cat stderr +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: cat stderr +./calc.at:1394: cat stderr stderr: -1.6: syntax error: invalid character: '#' +input: +input: +input: + | 1 + 2 * 3 +./calc.at:1374: $PREPARSER ./calc --exp input +1.14: memory exhausted +input: + | 1 = 2 = 3 +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: $PREPARSER ./calc input +stderr: + | (* *) + (*) + (*) ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88655,9 +88181,15 @@ }eg ' expout || exit 77 stderr: -./calc.at:1393: cat stderr -error: null divisor -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1391: $PREPARSER ./calc input +syntax error, unexpected '=' + | error +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: $PREPARSER ./calc input +stderr: +input: +stderr: +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88667,8 +88199,49 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error + | (#) + (#) = 2222 +syntax error +./calc.at:1393: cat stderr +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.14: memory exhausted +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error, unexpected '=' +stderr: +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1374: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' input: - | (!!) 
+ (1 2) = 1 +./calc.at:1392: cat stderr +stderr: +syntax error + | error +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1387: cat stderr ./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88679,14 +88252,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1392: cat stderr +stderr: +./calc.at:1374: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: syntax error -error: 2222 != 1 -./calc.at:1394: cat stderr ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1389: "$PERL" -pi -e 'use strict; +input: +stdout: +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88696,34 +88269,93 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1392: $PREPARSER ./calc input input: +./calc.at:1397: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c + +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (#) + (#) = 2222 -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1391: cat stderr +./calc.at:1387: $PREPARSER ./calc input +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1390: cat stderr stderr: +./calc.at:1395: cat stderr +syntax error stderr: syntax error -error: 2222 != 1 +syntax error +syntax error +syntax error +error: 4444 != 1 +498. calc.at:1374: ok +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: syntax error: invalid character: '#' syntax error: invalid character: '#' -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr + | + | +1 input: - | error -./calc.at:1394: $PREPARSER ./calc input +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +stderr: +./calc.at:1397: $PREPARSER ./calc input input: ./calc.at:1389: cat stderr + | (#) + (#) = 2222 +syntax error, unexpected '+' stderr: - | (1 + 1) / (1 - 1) -./calc.at:1391: $PREPARSER ./calc input -syntax error -stderr: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -1.11-17: error: null divisor -./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -501. calc.at:1389: ok +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: $PREPARSER ./calc input ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88734,10 +88366,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +./calc.at:1394: cat stderr stderr: -syntax error + | 1 + 2 * 3 + !+ ++ +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11-17: error: null divisor +stderr: +syntax error, unexpected '+' ./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88748,8 +88390,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +input: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1389: $PREPARSER ./calc input +input: +stderr: +input: +syntax error: invalid character: '#' + | 1 2 +./calc.at:1397: $PREPARSER ./calc input + + | 1 = 2 = 3 +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1392: cat stderr ./calc.at:1393: cat stderr -./calc.at:1391: "$PERL" -pi -e 'use strict; +stderr: +stderr: +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88759,8 +88422,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -./calc.at:1394: "$PERL" -pi -e 'use strict; +1.3: syntax error +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: cat stderr +stderr: +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88771,43 +88450,48 @@ }eg ' expout || exit 77 input: - | (- *) + (1 2) = 1 + | 1 + 2 * 3 + !- ++ +syntax error: invalid character: '#' +input: +stderr: + | 1 = 2 = 3 ./calc.at:1393: $PREPARSER ./calc input -./calc.at:1391: cat stderr -./calc.at:1392: cat stderr stderr: -503. calc.at:1391: ok -./calc.at:1394: cat stderr +./calc.at:1391: $PREPARSER ./calc input +stderr: +1.3: syntax error +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error +stderr: +./calc.at:1395: $PREPARSER ./calc /dev/null syntax error -error: 2222 != 1 +stderr: ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + #) = 1111 +stderr: +./calc.at:1390: cat stderr + | (!!) + (1 2) = 1 ./calc.at:1392: $PREPARSER ./calc input -input: +syntax error, unexpected end of input +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -510. calc.at:1400: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... stderr: - | 1 = 2 = 3 -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1400: mv calc.y.tmp calc.y - -syntax error: invalid character: '#' -syntax error +./calc.at:1387: cat stderr syntax error error: 2222 != 1 ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: - -stderr: -syntax error: invalid character: '#' -syntax error -./calc.at:1393: "$PERL" -pi -e 'use strict; +input: +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88817,7 +88501,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1392: "$PERL" -pi -e 'use strict; +syntax error +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88827,6 +88512,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: + | (1 + #) = 1111 ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88837,44 +88524,68 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1393: cat stderr -511. calc.at:1401: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose api.pure ... -./calc.at:1392: cat stderr -./calc.at:1401: mv calc.y.tmp calc.y +./calc.at:1390: $PREPARSER ./calc input +513. calc.at:1403: testing Calculator %glr-parser parse.error=verbose %locations %header %name-prefix "calc" %verbose ... 
+./calc.at:1403: mv calc.y.tmp calc.y -input: - | (* *) + (*) + (*) -./calc.at:1393: $PREPARSER ./calc input -input: stderr: -./calc.at:1394: cat stderr -syntax error syntax error -syntax error - | (# + 1) = 1111 -./calc.at:1392: $PREPARSER ./calc input -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1400: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -stderr: +error: 2222 != 1 +syntax error, unexpected end of input +./calc.at:1403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +./calc.at:1397: cat stderr +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: cat stderr input: -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -syntax error -syntax error -syntax error -./calc.at:1394: $PREPARSER ./calc input -stderr: +./calc.at:1389: cat stderr stderr: -syntax error: invalid character: '#' + | (1 + #) = 1111 +./calc.at:1387: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' stderr: -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1394: cat stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (# + 1) = 1111 + | 1//2 stdout: -./calc.at:1395: "$PERL" -ne ' +./calc.at:1397: $PREPARSER ./calc input +stderr: +./calc.at:1389: $PREPARSER ./calc input +input: +./calc.at:1390: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1398: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -88885,19 +88596,13 @@ || /\t/ )' calc.c -./calc.at:1393: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1392: cat stderr stderr: +syntax error: invalid character: '#' +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1392: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88907,7 +88612,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.3: syntax error + | 1 + 2 * 3 + !* ++ +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1393: cat stderr +stderr: +./calc.at:1394: $PREPARSER ./calc input +input: +./calc.at:1395: cat stderr +stderr: + | (- *) + (1 2) = 1 +./calc.at:1392: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: +1.3: syntax error +syntax error +syntax error +error: 2222 != 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: syntax error +stderr: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -88921,12 +88656,27 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1395: $PREPARSER ./calc input +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1390: cat stderr +./calc.at:1393: $PREPARSER ./calc input stderr: -./calc.at:1393: cat stderr -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: cat stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +stderr: +1.14: memory exhausted +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +stderr: +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 +stderr: +input: +syntax error +stderr: +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88937,47 +88687,57 @@ }eg ' expout || exit 77 input: -stderr: -./calc.at:1401: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS - | 1 + 2 * 3 + !+ ++ -input: -./calc.at:1393: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -./calc.at:1392: $PREPARSER ./calc input -stderr: -input: -stderr: -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -syntax error: invalid character: '#' + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: ./calc.at:1395: $PREPARSER ./calc input -./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: syntax error: invalid character: '#' -./calc.at:1394: cat stderr -syntax error, unexpected number -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -syntax error, unexpected number - | 1 + 2 * 3 + !- ++ +1.2: syntax error: invalid character: '#' stderr: -./calc.at:1393: $PREPARSER ./calc input -stdout: -./types.at:139: $PREPARSER ./test +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1394: $PREPARSER ./calc /dev/null 
-./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error stderr: +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 stderr: -syntax error -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1392: "$PERL" -pi -e 'use strict; +1.14: memory exhausted +1.2: syntax error: invalid character: '#' +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1392: cat stderr +./calc.at:1397: cat stderr + | 1 2 +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -88987,6 +88747,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1398: $PREPARSER ./calc input ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -88997,10 +88758,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./calc.at:1395: cat stderr -445. types.at:139: ok -./calc.at:1392: cat stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89011,7 +88778,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89022,42 +88788,111 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1//2 -./calc.at:1395: $PREPARSER ./calc input +./calc.at:1403: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: - input: - | (1 + 1) / (1 - 1) -syntax error, unexpected '/', expecting number or '-' or '(' or '!' +1.3: syntax error, unexpected number +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: cat stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1397: $PREPARSER ./calc input +./calc.at:1390: cat stderr +input: +./calc.at:1387: cat stderr +stderr: +stderr: +1.1: syntax error +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +1.3: syntax error, unexpected number ./calc.at:1392: $PREPARSER ./calc input -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1393: cat stderr +input: +input: +stderr: + | (!!) + (1 2) = 1 + | (1 + # + 1) = 1111 +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1395: $PREPARSER ./calc input stderr: +stderr: +1.6: syntax error: invalid character: '#' +1.1: syntax error +stderr: +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1394: cat stderr -error: null divisor +syntax error +syntax error +syntax error +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: cat stderr +./calc.at:1393: $PREPARSER ./calc /dev/null stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -stderr: -input: +./calc.at:1391: cat stderr input: -error: null divisor - | 1 + 2 * 3 + !* ++ - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1394: $PREPARSER ./calc input stderr: +1.6: syntax error: invalid character: '#' stderr: -memory exhausted + | (# + 1) = 1111 +./calc.at:1387: $PREPARSER ./calc input +syntax error ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1394: $PREPARSER ./calc /dev/null +input: +stderr: syntax error syntax error syntax error +stderr: +stderr: +input: +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +syntax error: invalid character: '#' syntax error -error: 4444 != 1 +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; + | (#) + (#) = 2222 + | (1 + # + 1) = 1111 +./calc.at:1391: $PREPARSER ./calc input +./calc.at:1389: $PREPARSER ./calc input +./calc.at:1398: cat stderr +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89067,7 +88902,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1392: "$PERL" -pi -e 'use strict; +stderr: +stderr: +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89078,19 +88915,40 @@ }eg ' expout || exit 77 stderr: -memory exhausted -syntax error -syntax error -syntax error +input: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error -error: 4444 != 1 -512. calc.at:1402: testing Calculator %glr-parser parse.error=detailed %locations %header %name-prefix "calc" %verbose ... -./calc.at:1402: mv calc.y.tmp calc.y - + | 1//2 +./calc.at:1398: $PREPARSER ./calc input +stderr: +syntax error: invalid character: '#' +stderr: ./calc.at:1395: cat stderr -./calc.at:1402: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1392: cat stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1390: cat stderr +stderr: +./calc.at:1397: cat stderr +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' input: +stderr: ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89101,7 +88959,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error +input: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' + | (- *) + (1 2) = 1 +input: ./calc.at:1395: $PREPARSER ./calc input ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -89113,27 +88974,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1392: cat stderr + | (1 + 1) / (1 - 1) stderr: -504. calc.at:1392: ok -syntax error, unexpected invalid token -./calc.at:1393: cat stderr -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: cat stderr -stderr: -syntax error, unexpected invalid token -input: - - | (#) + (#) = 2222 -./calc.at:1393: $PREPARSER ./calc input -input: -stderr: - | (!!) 
+ (1 2) = 1 -./calc.at:1394: $PREPARSER ./calc input -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; +./calc.at:1390: $PREPARSER ./calc input +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89143,19 +88988,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | 1 = 2 = 3 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number error: 2222 != 1 -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1395: cat stderr -./calc.at:1402: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: -syntax error -error: 2222 != 1 +1.11-17: error: null divisor +./calc.at:1390: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89165,16 +89020,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 = 2 = 3 -./calc.at:1395: $PREPARSER ./calc input -513. calc.at:1403: testing Calculator %glr-parser parse.error=verbose %locations %header %name-prefix "calc" %verbose ... -./calc.at:1403: mv calc.y.tmp calc.y - -stderr: -syntax error, unexpected '=' -./calc.at:1403: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1389: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89184,14 +89030,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error, unexpected '=' + | 1 + 2 * 3 + !+ ++ +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 ./calc.at:1393: cat stderr +./calc.at:1392: $PREPARSER ./calc input +stderr: +1.7: syntax error +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +stderr: +./calc.at:1398: cat stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1394: cat stderr +stderr: +./calc.at:1387: cat stderr +1.7: syntax error +stderr: +./calc.at:1389: cat stderr +./calc.at:1391: cat stderr input: - | (1 + #) = 1111 -./calc.at:1393: $PREPARSER ./calc input -./calc.at:1395: "$PERL" -pi -e 'use strict; +./calc.at:1390: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89201,32 +89061,169 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1393: $PREPARSER ./calc input stderr: input: -syntax error: invalid character: '#' +input: +input: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1389: $PREPARSER ./calc input +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 + | error + | (1 + # + 1) = 1111 +./calc.at:1387: $PREPARSER ./calc input ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS - | (- *) + (1 2) = 1 +stderr: ./calc.at:1394: $PREPARSER ./calc input -./calc.at:1395: cat stderr +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +./calc.at:1390: cat stderr stderr: stderr: +stderr: +error: null divisor +./calc.at:1389: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1400: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1395: cat stderr syntax error syntax error -error: 2222 != 1 +syntax error +syntax error +error: 4444 != 1 ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error: invalid character: '#' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (1 + #) = 1111 +./calc.at:1391: $PREPARSER ./calc input +error: null divisor +stderr: +stderr: +input: +stderr: stderr: +502. 
calc.at:1390: ok +1.1: syntax error, unexpected invalid token input: syntax error syntax error -error: 2222 != 1 +syntax error +syntax error +error: 4444 != 1 +1.6: syntax error: invalid character: '#' + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 | - | +1 + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: cat stderr +./calc.at:1400: $PREPARSER ./calc input + | (* *) + (*) + (*) +stderr: ./calc.at:1395: $PREPARSER ./calc input stderr: -syntax error, unexpected '+' +syntax error: invalid character: '#' +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1389: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.6: syntax error: invalid character: '#' +stderr: ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89237,8 +89234,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1392: cat stderr +stderr: + + | + | +1 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1397: $PREPARSER ./calc input +input: +./calc.at:1389: cat stderr + | 1 2 +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1387: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1398: cat stderr stderr: -syntax error, unexpected '+' ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89249,7 +89280,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 + 2 * 3 + !* ++ +501. calc.at:1389: ok +./calc.at:1392: $PREPARSER ./calc input +stderr: +stderr: +2.1: syntax error +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1393: cat stderr +stderr: +stderr: +memory exhausted +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +input: +./calc.at:1387: cat stderr ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89260,40 +89308,126 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1398: $PREPARSER ./calc input +input: ./calc.at:1394: cat stderr +./calc.at:1395: cat stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1391: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1393: $PREPARSER ./calc input +memory exhausted +stderr: +stderr: +./calc.at:1400: cat stderr +input: +syntax error +error: 2222 != 1 + +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error, unexpected '=' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) input: +./calc.at:1387: $PREPARSER ./calc input +514. calc.at:1405: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... +./calc.at:1405: mv calc.y.tmp calc.y + | (# + 1) = 1111 -./calc.at:1393: $PREPARSER ./calc input +stderr: +./calc.at:1391: $PREPARSER ./calc input input: - | (* *) + (*) + (*) -./calc.at:1395: cat stderr +input: +1.7: syntax error, unexpected '=' + | (!!) 
+ (1 2) = 1 +stderr: ./calc.at:1394: $PREPARSER ./calc input +input: +./calc.at:1397: cat stderr stderr: +error: null divisor + | 1 + 2 * 3 + !+ ++ stderr: stderr: +./calc.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1387: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: $PREPARSER ./calc input syntax error -syntax error -syntax error +error: 2222 != 1 + | 1//2 ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: $PREPARSER ./calc /dev/null -syntax error: invalid character: '#' -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: +./calc.at:1400: $PREPARSER ./calc input +syntax error +error: 2222 != 1 +1.2: syntax error: invalid character: '#' +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1397: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: stderr: -syntax error, unexpected end of input -./types.at:139: $PREPARSER ./test ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: +error: null divisor syntax error -syntax error -syntax error +error: 2222 != 1 +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +./calc.at:1398: cat stderr +1.2: syntax error: invalid character: '#' stderr: +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -syntax error: invalid character: '#' stderr: -syntax error, unexpected end of input -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +1.1: syntax error +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +input: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1391: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89303,9 +89437,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -447. 
types.at:139: ok -stderr: -./calc.at:1393: "$PERL" -pi -e 'use strict; +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1387: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89315,20 +89448,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./calc.at:1397: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c + | + | +1 +./calc.at:1392: cat stderr +./calc.at:1398: $PREPARSER ./calc input +515. calc.at:1407: testing Calculator %glr-parser %debug ... +./calc.at:1407: mv calc.y.tmp calc.y -./calc.at:1394: cat stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; +./calc.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1400: cat stderr +./calc.at:1387: cat stderr +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89338,76 +89468,134 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr -input: - +stderr: +2.1: syntax error, unexpected '+' +./calc.at:1391: cat stderr +stderr: ./calc.at:1393: cat stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1397: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1394: $PREPARSER ./calc input +stderr: +500. calc.at:1387: ok + | (#) + (#) = 2222 +2.1: syntax error, unexpected '+' +./calc.at:1392: $PREPARSER ./calc input +stderr: stderr: input: input: -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: $PREPARSER ./calc input + | error | (1 + # + 1) = 1111 +./calc.at:1391: $PREPARSER ./calc input +input: +./calc.at:1400: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' + | (- *) + (1 2) = 1 +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1397: cat stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1393: $PREPARSER ./calc input -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: cat stderr stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-error: 4444 != 1 stderr: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.6: syntax error: invalid character: '#' +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 stderr: +./calc.at:1405: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./types.at:139: $PREPARSER ./test +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1395: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: input: stderr: - | 1 2 -./calc.at:1397: $PREPARSER ./calc input -syntax error: invalid character: '#' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 +stderr: input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +syntax error +syntax error +error: 2222 != 1 + | (- *) + (1 2) = 1 +./calc.at:1397: $PREPARSER ./calc input +./calc.at:1398: cat stderr + stderr: -514. calc.at:1405: testing Calculator %glr-parser parse.error=custom %locations %header %name-prefix "calc" %verbose ... - | 1 + 2 * 3 + !- ++ ./calc.at:1394: $PREPARSER ./calc input -1.3: syntax error -./calc.at:1405: mv calc.y.tmp calc.y - +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +1.6: syntax error: invalid character: '#' ./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: $PREPARSER ./calc /dev/null +./calc.at:1400: cat stderr +./calc.at:1395: cat stderr stderr: -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y stderr: -1.3: syntax error +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1395: "$PERL" -pi -e 'use strict; +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89417,6 +89605,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +syntax error +syntax error +error: 2222 != 1 +input: ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89427,6 +89620,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +./calc.at:1391: cat stderr +451. types.at:139: ok +1.1: syntax error, unexpected end of input +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89437,21 +89636,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1395: cat stderr +./calc.at:1400: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1395: $PREPARSER ./calc input stderr: -stdout: -./calc.at:1398: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c - -./calc.at:1393: cat stderr +./calc.at:1407: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS +input: +stderr: +./calc.at:1392: cat stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) + | (1 + 1) / (1 - 1) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1391: $PREPARSER ./calc input +1.1: syntax error, unexpected end of input +stderr: +stderr: +stderr: +memory exhausted +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89462,65 +89664,102 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (!!) + (1 2) = 1 +./calc.at:1393: cat stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +1.11-17: error: null divisor +./calc.at:1391: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +516. calc.at:1408: testing Calculator %glr-parser parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... 
+./calc.at:1408: mv calc.y.tmp calc.y + ./calc.at:1397: cat stderr -./calc.at:1395: $PREPARSER ./calc input -input: stderr: + +./calc.at:1398: cat stderr input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1398: $PREPARSER ./calc input -syntax error, unexpected number -error: 2222 != 1 - | (1 + 1) / (1 - 1) -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +memory exhausted stderr: +./calc.at:1408: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y ./calc.at:1394: cat stderr +1.11-17: error: null divisor + | (* *) + (*) + (*) ./calc.at:1393: $PREPARSER ./calc input -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1400: cat stderr stderr: + | (1 + #) = 1111 +./calc.at:1392: $PREPARSER ./calc input +syntax error +syntax error +syntax error +input: input: -stderr: -error: null divisor ./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1//2 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1391: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (!!) + (1 2) = 1 ./calc.at:1397: $PREPARSER ./calc input -syntax error, unexpected number -error: 2222 != 1 stderr: -1.3: syntax error -input: stderr: input: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !* ++ + | (* *) + (*) + (*) +stderr: +syntax error: invalid character: '#' ./calc.at:1394: $PREPARSER ./calc input - | 1 2 -./calc.at:1398: $PREPARSER ./calc input -error: null divisor +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +input: +syntax error +syntax error +syntax error stderr: +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error, unexpected number -memory exhausted +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error + | + | +1 +stderr: +stderr: +./calc.at:1400: $PREPARSER ./calc input +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 +syntax error +syntax error +syntax error ./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89531,12 +89770,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1391: cat stderr +syntax error: invalid character: '#' +1.11: syntax error +1.1-16: error: 2222 != 1 stderr: -stderr: -memory exhausted -1.3: syntax error, unexpected number ./calc.at:1395: cat stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89546,6 +89788,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: ./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89556,8 +89799,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89567,14 +89809,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (- *) + (1 2) = 1 +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +input: + | (#) + (#) = 2222 +503. calc.at:1391: ok ./calc.at:1395: $PREPARSER ./calc input -./calc.at:1397: cat stderr stderr: +517. calc.at:1409: testing Calculator %glr-parser parse.error=verbose %debug %locations %header api.prefix={calc} api.token.prefix={TOK_} %verbose ... +./calc.at:1409: mv calc.y.tmp calc.y + ./calc.at:1398: cat stderr -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 ./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89585,27 +89829,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error: invalid character: '#' +syntax error: invalid character: '#' ./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | error -./calc.at:1393: cat stderr -./calc.at:1397: $PREPARSER ./calc input -stderr: -input: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -stderr: - | 1//2 -./calc.at:1394: cat stderr stderr: -./calc.at:1398: $PREPARSER ./calc input -1.1: syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -505. calc.at:1393: ok +./calc.at:1392: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1400: cat stderr stdout: stderr: -./calc.at:1400: "$PERL" -ne ' +./calc.at:1409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1401: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. 
@@ -89616,31 +89858,35 @@ || /\t/ )' calc.c calc.h +syntax error: invalid character: '#' +syntax error: invalid character: '#' + +./calc.at:1408: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1393: cat stderr input: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1394: $PREPARSER ./calc input +./calc.at:1400: $PREPARSER ./calc /dev/null + | (!!) + (1 2) = 1 stderr: -1.1: syntax error +./calc.at:1394: cat stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1397: cat stderr stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | 1 + 2 * 3 + !+ ++ +./calc.at:1392: cat stderr +input: +./calc.at:1393: $PREPARSER ./calc input input: stderr: -./calc.at:1395: cat stderr -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+ | (- *) + (1 2) = 1 | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -89654,8 +89900,15 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1401: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1394: $PREPARSER ./calc input +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: +./calc.at:1397: $PREPARSER ./calc input +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89667,9 +89920,19 @@ ' expout || exit 77 stderr: stderr: - -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: input: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: cat stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1392: $PREPARSER ./calc input +./calc.at:1395: cat stderr ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89680,22 +89943,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' - | (* *) + (*) + (*) -./calc.at:1395: $PREPARSER ./calc input stderr: stderr: -./calc.at:1397: cat stderr +stderr: +input: +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +syntax error: invalid character: '#' ./calc.at:1398: cat stderr -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ input: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 +input: +./calc.at:1393: $PREPARSER ./calc input +input: +input: + | 1 + 2 * 3 + !- ++ +518. calc.at:1411: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... 
+stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1394: $PREPARSER ./calc input + | (1 + #) = 1111 ./calc.at:1400: $PREPARSER ./calc input -./calc.at:1394: "$PERL" -pi -e 'use strict; + | 1 2 +./calc.at:1411: mv calc.y.tmp calc.y + +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89705,46 +89982,52 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1401: $PREPARSER ./calc input +stderr: +stderr: input: stderr: +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +stderr: +syntax error: invalid character: '#' + | (- *) + (1 2) = 1 +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: $PREPARSER ./calc input +stderr: 1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 ./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | error - | 1 = 2 = 3 -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1398: $PREPARSER ./calc input +./calc.at:1409: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: stderr: -1.7: syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error, unexpected invalid token +syntax error: invalid character: '#' +./calc.at:1411: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1397: cat stderr +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 ./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: cat stderr stderr: -1.7: syntax error -./calc.at:1394: cat stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1//2 stderr: -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1397: "$PERL" -pi -e 'use strict; +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89754,24 +90037,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error, unexpected invalid token -stderr: -input: -./calc.at:1395: cat stderr -515. calc.at:1407: testing Calculator %glr-parser %debug ... - | (1 + #) = 1111 -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1407: mv calc.y.tmp calc.y - -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1407: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -syntax error: invalid character: '#' -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1400: cat stderr input: -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89781,56 +90049,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1395: $PREPARSER ./calc input -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -stderr: -stderr: -./calc.at:1397: cat stderr -./calc.at:1398: cat stderr -syntax error: invalid character: '#' -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: cat stderr -input: -stderr: - | - | +1 -input: + | (* *) + (*) + (*) ./calc.at:1397: $PREPARSER ./calc input -input: -stderr: - | 1 = 2 = 3 - | error -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1398: $PREPARSER ./calc input -input: -2.1: syntax error -stderr: -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; my @exps = $2 =~ /\[(.*?)\]/g; ($#exps && $#exps < 4) ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | 1 + 2 * 3 + !- ++ -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error, unexpected '=' -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error, unexpected '=' -stderr: -2.1: syntax error + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1395: cat stderr +input: ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89841,10 +90084,47 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1401: cat stderr +./calc.at:1392: cat stderr +./calc.at:1400: $PREPARSER ./calc input +input: + | (* *) + (*) + (*) stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1398: $PREPARSER ./calc input +1.2: syntax error +1.10: syntax error +1.16: syntax error ./calc.at:1394: cat stderr -./calc.at:1398: cat stderr +./calc.at:1393: cat stderr +stderr: +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +input: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +input: +./calc.at:1395: $PREPARSER ./calc input +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+ | 1//2 +./calc.at:1401: $PREPARSER ./calc input +input: +stderr: +syntax error: invalid character: '#' ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89855,11 +90135,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1407: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c $LIBS -./calc.at:1400: cat stderr -input: +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (1 + # + 1) = 1111 +./calc.at:1392: $PREPARSER ./calc input input: -./calc.at:1395: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !* ++ +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1394: $PREPARSER ./calc input +stderr: +stderr: + | 1 + 2 * 3 + !* ++ +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89869,41 +90157,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 - | - | +1 -./calc.at:1394: $PREPARSER ./calc input -./calc.at:1398: $PREPARSER ./calc input -input: -./calc.at:1397: cat stderr +syntax error: invalid character: '#' +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1393: $PREPARSER ./calc input +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: - | 1 = 2 = 3 -./calc.at:1400: $PREPARSER ./calc input -syntax error: invalid character: '#' -2.1: syntax error, unexpected '+' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: cat stderr stderr: +memory exhausted ./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: $PREPARSER ./calc /dev/null -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +syntax error: invalid character: '#' +memory exhausted +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: cat stderr stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1400: cat stderr stderr: -1.1: syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error: invalid character: '#' -2.1: syntax error, unexpected '+' +./calc.at:1397: cat stderr +memory exhausted +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) input: - | 1 + 2 * 3 + !* ++ -./calc.at:1395: $PREPARSER ./calc input -stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) + | 1 + 2 * 3 + !+ ++ +./calc.at:1398: $PREPARSER ./calc input stderr: stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; +input: +memory exhausted +./calc.at:1411: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89913,11 +90197,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -memory exhausted -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -./calc.at:1398: cat stderr -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1400: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ 
+./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89927,11 +90212,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1400: cat stderr +./calc.at:1397: $PREPARSER ./calc input +./calc.at:1395: cat stderr stderr: -memory exhausted -./calc.at:1398: $PREPARSER ./calc /dev/null -./calc.at:1397: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1401: cat stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +stderr: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89941,20 +90232,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.1: syntax error, unexpected end of input -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1394: cat stderr -stderr: - | - | +1 -./calc.at:1400: $PREPARSER ./calc input -stdout: -stderr: -stderr: -./types.at:139: $PREPARSER ./test -./calc.at:1395: "$PERL" -pi -e 'use strict; +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -89964,19 +90244,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -1.1: syntax error, unexpected end of input -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1397: cat stderr -./calc.at:1395: cat stderr input: + | 1 + 2 * 3 + !- ++ +input: +./calc.at:1398: $PREPARSER ./calc input +stderr: + | error stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) | (1 + # + 1) = 1111 -./calc.at:1394: $PREPARSER ./calc input +./calc.at:1401: $PREPARSER ./calc input stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: $PREPARSER ./calc input +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1392: cat stderr +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1393: cat stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1394: cat stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +input: +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +stderr: +./calc.at:1397: $PREPARSER ./calc input +input: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1400: cat stderr +stderr: +stderr: +syntax error: invalid character: '#' + | (1 + 1) / (1 - 1) +./calc.at:1392: $PREPARSER ./calc input ./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -89987,63 +90291,54 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1394: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr input: input: -./calc.at:1398: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -446. types.at:139: ok -./calc.at:1397: $PREPARSER ./calc input +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (#) + (#) = 2222 -./calc.at:1400: cat stderr -./calc.at:1395: $PREPARSER ./calc input -stderr: stderr: -syntax error: invalid character: '#' + | (#) + (#) = 2222 +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1393: $PREPARSER ./calc input +error: null divisor input: +./calc.at:1392: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: cat stderr stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1398: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1401: cat stderr + | (* *) + (*) + (*) +./calc.at:1400: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' syntax error: invalid character: '#' syntax error: invalid character: '#' -./calc.at:1400: $PREPARSER ./calc /dev/null stderr: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +error: null divisor +input: stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | 1 + 2 * 3 + !* ++ +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) stderr: ./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 - +./calc.at:1398: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: +input: syntax error: invalid character: '#' syntax error: invalid character: '#' -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 +1.14: memory exhausted +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1394: "$PERL" -pi -e 'use strict; + | 1 = 2 = 3 +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90063,7 +90358,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1398: "$PERL" -pi -e 'use strict; +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1395: cat stderr +1.14: memory exhausted +./calc.at:1392: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90074,8 +90378,17 @@ }eg ' expout || exit 77 ./calc.at:1400: cat stderr -./calc.at:1398: cat stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; +./calc.at:1394: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90086,92 +90399,93 @@ }eg ' expout || exit 77 ./calc.at:1397: cat stderr +stderr: input: -./calc.at:1394: cat stderr +./calc.at:1398: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) + | (1 + 1) / (1 - 1) +./calc.at:1395: $PREPARSER ./calc input +./calc.at:1392: cat stderr +stderr: input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: + | 1 + 2 * 3 + !+ ++ +error: null divisor ./calc.at:1400: $PREPARSER ./calc input -./calc.at:1395: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1398: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +./calc.at:1393: cat stderr +./calc.at:1398: cat stderr +./calc.at:1394: cat stderr +./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: $PREPARSER ./calc input stderr: +./calc.at:1401: cat stderr +504. calc.at:1392: ok stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -input: ./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 +stderr: +1.14: memory exhausted input: -516. calc.at:1408: testing Calculator %glr-parser parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... 
-./calc.at:1408: mv calc.y.tmp calc.y - -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1394: $PREPARSER ./calc input +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1398: $PREPARSER ./calc input +input: +error: null divisor stderr: stderr: - | (!!) + (1 2) = 1 -stdout: -./calc.at:1397: $PREPARSER ./calc input -error: null divisor -./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: | (1 + #) = 1111 +input: +./calc.at:1394: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | + | +1 +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1401: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -1.11: syntax error -1.1-16: error: 2222 != 1 -stderr: + | (1 + #) = 1111 +./calc.at:1393: $PREPARSER ./calc input stderr: -./calc.at:1408: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -stdout: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.14: memory exhausted stderr: -stdout: -./calc.at:1403: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h +syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error: invalid character: '#' -./types.at:139: $PREPARSER ./test -error: null divisor -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1400: $PREPARSER ./calc input stderr: -input: -./calc.at:1400: cat stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1397: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1395: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90181,50 +90495,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1401: $PREPARSER ./calc input -stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1403: $PREPARSER ./calc input -1.11: syntax error -1.1-16: error: 2222 != 1 -syntax error: invalid character: '#' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1398: cat stderr -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: -450. types.at:139: ok -./calc.at:1394: "$PERL" -pi -e 'use strict; +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90234,11 +90506,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stdout: -input: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) stderr: -input: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1395: cat stderr +stderr: +./calc.at:1398: cat stderr +stdout: +./calc.at:1401: cat stderr +./calc.at:1397: cat stderr ./calc.at:1402: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -90250,16 +90528,11 @@ || /\t/ )' calc.c calc.h - | 1 2 - | (- *) + (1 2) = 1 -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1403: $PREPARSER ./calc input -stderr: - | (!!) + (1 2) = 1 -./calc.at:1400: $PREPARSER ./calc input -1.3: syntax error, unexpected number +507. calc.at:1395: ok +./calc.at:1400: cat stderr +./calc.at:1401: $PREPARSER ./calc /dev/null input: -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90269,9 +90542,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +519. calc.at:1413: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1413: mv calc.y.tmp calc.y + + | (1 + #) = 1111 +./calc.at:1398: $PREPARSER ./calc input stderr: -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +input: +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90281,23 +90562,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: $PREPARSER ./calc input -stderr: -input: -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1395: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -90311,40 +90575,39 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -stderr: ./calc.at:1402: $PREPARSER ./calc input -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1394: cat stderr -1.3: syntax error, unexpected number + | (#) + (#) = 2222 +./calc.at:1397: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1397: cat stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: +stderr: +./calc.at:1413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | 1 + 2 * 3 + !* ++ +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1400: $PREPARSER ./calc input stderr: - -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1408: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -506. 
calc.at:1394: ok ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (- *) + (1 2) = 1 +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' stderr: -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1398: cat stderr +./calc.at:1393: cat stderr +./calc.at:1394: cat stderr + +stderr: +stderr: +1.14: memory exhausted +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1401: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' input: -./calc.at:1403: "$PERL" -pi -e 'use strict; +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90354,39 +90617,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: input: | (# + 1) = 1111 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1400: cat stderr -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1395: $PREPARSER ./calc input -./calc.at:1401: cat stderr - | 1 2 -./calc.at:1402: $PREPARSER ./calc input -stderr: input: +./calc.at:1393: $PREPARSER ./calc input +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1401: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -stderr: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 - | (* *) + (*) + (*) -./calc.at:1398: $PREPARSER ./calc input -1.3: syntax error, unexpected number -./calc.at:1403: cat stderr -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: cat stderr stderr: -input: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +syntax error: invalid character: '#' ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90397,44 +90638,48 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (- *) + (1 2) = 1 - | 1//2 -stderr: -./calc.at:1401: $PREPARSER ./calc input - -./calc.at:1400: $PREPARSER ./calc input -syntax error: invalid character: '#' + | 1 2 +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.14: memory exhausted +./calc.at:1402: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1394: $PREPARSER ./calc input stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
stderr: +1.3: syntax error, unexpected number input: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) + | (# + 1) = 1111 +./calc.at:1398: $PREPARSER ./calc input +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1403: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1400: cat stderr 1.3: syntax error, unexpected number stderr: -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 +1.2: syntax error: invalid character: '#' +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 ./calc.at:1397: cat stderr -517. calc.at:1409: testing Calculator %glr-parser parse.error=verbose %debug %locations %header api.prefix={calc} api.token.prefix={TOK_} %verbose ... -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1409: mv calc.y.tmp calc.y - -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; +input: +stderr: +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90444,7 +90689,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1402: "$PERL" -pi -e 'use strict; +./calc.at:1401: cat stderr +syntax error: invalid character: '#' +520. calc.at:1414: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+./calc.at:1414: mv calc.y.tmp calc.y + + | (#) + (#) = 2222 +./calc.at:1400: $PREPARSER ./calc input +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90454,10 +90706,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1400: cat stderr -./calc.at:1402: cat stderr -./calc.at:1401: cat stderr -./calc.at:1395: "$PERL" -pi -e 'use strict; +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90468,40 +90721,19 @@ }eg ' expout || exit 77 ./calc.at:1398: cat stderr -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1409: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -input: +./calc.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y input: - | (* *) + (*) + (*) - | (* *) + (*) + (*) -./calc.at:1400: $PREPARSER ./calc input -./calc.at:1397: $PREPARSER ./calc input +./calc.at:1413: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS input: - | 1//2 stderr: -stderr: -./calc.at:1402: $PREPARSER ./calc input -./calc.at:1395: cat stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -input: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1401: $PREPARSER ./calc input input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1398: $PREPARSER ./calc input -stderr: -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -518. calc.at:1411: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose ... 
-./calc.at:1403: "$PERL" -pi -e 'use strict; + | (1 + #) = 1111 +./calc.at:1402: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90511,39 +90743,48 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1411: mv calc.y.tmp calc.y - - | error -./calc.at:1401: $PREPARSER ./calc input -stderr: -stderr: +./calc.at:1397: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1398: $PREPARSER ./calc input input: stderr: stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.2: syntax error -1.10: syntax error -1.16: syntax error - | (1 + # + 1) = 1111 -./calc.at:1395: $PREPARSER ./calc input -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 input: -./calc.at:1403: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1411: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1393: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: + | 1//2 +./calc.at:1402: $PREPARSER ./calc input stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' stderr: -syntax error: invalid character: '#' stderr: -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1400: cat stderr -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1394: cat stderr +1.6: syntax error: invalid character: '#' +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+input: +./calc.at:1401: cat stderr +input: ./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90554,17 +90795,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1400: $PREPARSER ./calc input -stderr: - | error -./calc.at:1403: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1402: "$PERL" -pi -e 'use strict; + | (1 + # + 1) = 1111 +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90574,14 +90806,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1402: cat stderr -stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1409: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1395: "$PERL" -pi -e 'use strict; + | (1 + #) = 1111 +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1400: $PREPARSER ./calc input +input: +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90591,22 +90820,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: cat stderr -input: -./calc.at:1397: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1400: $PREPARSER ./calc input -stderr: -input: -stderr: -./calc.at:1395: cat stderr - | error -./calc.at:1402: $PREPARSER ./calc input -1.1: syntax error, unexpected invalid token -stderr: -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90616,76 +90830,64 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test +1.6: syntax error: invalid character: '#' stderr: +syntax error: invalid character: '#' +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1398: cat stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1397: cat stderr +./calc.at:1402: cat stderr +./calc.at:1414: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS stderr: -input: stderr: - | 1 = 2 = 3 -./calc.at:1401: $PREPARSER ./calc input -1.1: syntax error, unexpected invalid token -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: cat stderr -input: -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1400: cat stderr -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) - | (1 + 1) / (1 - 1) +./calc.at:1393: cat stderr +1.6: syntax error: invalid character: '#' input: -./calc.at:1395: $PREPARSER ./calc input +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -449. types.at:139: ok - | 1 + 2 * 3 + !+ ++ -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +syntax error: invalid character: '#' + | (1 + 1) / (1 - 1) +./calc.at:1398: $PREPARSER ./calc input +input: +input: + | (# + 1) = 1111 ./calc.at:1397: $PREPARSER ./calc input input: -error: null divisor -./calc.at:1395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1393: $PREPARSER ./calc input stderr: -./calc.at:1403: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1400: $PREPARSER ./calc input +1.11-17: error: null divisor +./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1400: cat stderr + | error +./calc.at:1402: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 stderr: stderr: -1.14: memory exhausted +stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr error: null divisor -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: cat stderr +1.11-17: error: null divisor +./calc.at:1393: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1401: cat stderr stderr: -1.14: memory exhausted -input: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1411: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS - | 1 + 2 * 3 + !* ++ -./calc.at:1395: "$PERL" -pi -e 'use strict; + | (# + 1) = 1111 +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90695,57 +90897,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1401: cat stderr -./calc.at:1398: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1402: $PREPARSER ./calc input -stderr: -stderr: - | 1 = 2 = 3 -./calc.at:1395: cat stderr -./calc.at:1400: cat stderr -stderr: -1.7: syntax error, unexpected '=' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: $PREPARSER ./calc input - -input: -1.14: memory exhausted -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' 
stderr -stderr: stderr: -1.7: syntax error, unexpected '=' -input: -input: -507. calc.at:1395: ok -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -1.7: syntax error, unexpected '=' - | (#) + (#) = 2222 -./calc.at:1401: $PREPARSER ./calc input ./calc.at:1400: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1397: $PREPARSER ./calc input -stderr: stderr: 1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error, unexpected '=' -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: -stderr: -stderr: 1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.14: memory exhausted -./calc.at:1400: cat stderr +1.1: syntax error, unexpected invalid token stderr: -./calc.at:1402: "$PERL" -pi -e 'use strict; +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor +./calc.at:1398: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90755,8 +90916,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1403: "$PERL" -pi -e 'use strict; +input: +./calc.at:1398: cat stderr + | (* *) + (*) + (*) +./calc.at:1401: $PREPARSER ./calc input +stderr: +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1394: cat stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +509. 
calc.at:1398: ok +./calc.at:1393: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90766,11 +90939,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -stderr: -./calc.at:1402: cat stderr -input: -./calc.at:1398: "$PERL" -pi -e 'use strict; +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90780,14 +90949,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1400: $PREPARSER ./calc input -stderr: -./calc.at:1403: cat stderr -./calc.at:1398: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90798,47 +90960,52 @@ }eg ' expout || exit 77 input: -input: stderr: - | (#) + (#) = 2222 -./calc.at:1398: $PREPARSER ./calc input - | - | +1 -./calc.at:1402: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1393: cat stderr +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1394: $PREPARSER ./calc input +./calc.at:1400: cat stderr +stderr: +505. calc.at:1393: ok +error: null divisor +./calc.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: cat stderr +./calc.at:1402: cat stderr +stderr: input: ./calc.at:1401: cat stderr +error: null divisor + | (1 + # + 1) = 1111 +./calc.at:1400: $PREPARSER ./calc input + +input: + | (1 + # + 1) = 1111 stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: $PREPARSER ./calc input +input: +input: + | 1 + 2 * 3 + !+ ++ 1.6: syntax error: invalid character: '#' +./calc.at:1401: $PREPARSER ./calc input stderr: -2.1: syntax error, unexpected '+' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1403: $PREPARSER ./calc input -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1397: cat stderr -stderr: +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1402: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -2.1: syntax error, unexpected '+' -./calc.at:1401: $PREPARSER ./calc /dev/null -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1400: cat stderr -519. calc.at:1413: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
stderr: - | 1 + 2 * 3 + !* ++ stderr: -./calc.at:1413: mv calc.y.tmp calc.y +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error, unexpected '=' +1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: $PREPARSER ./calc input -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1398: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1394: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90848,11 +91015,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -520. calc.at:1414: testing Calculator %glr-parser api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: mv calc.y.tmp calc.y +stderr: +1.7: syntax error, unexpected '=' +stderr: +stdout: +input: +stderr: + | 1 + 2 * 3 + !- ++ +1.6: syntax error: invalid character: '#' +./calc.at:1400: cat stderr +./calc.at:1401: $PREPARSER ./calc input +./calc.at:1403: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h -2.1: syntax error, unexpected '+' +stderr: ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90863,31 +91047,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1394: cat stderr +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -./calc.at:1398: cat stderr -./calc.at:1413: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +521. calc.at:1416: testing Calculator %glr-parser %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+ | (1 + 1) / (1 - 1) input: -1.14: memory exhausted -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 ./calc.at:1400: $PREPARSER ./calc input -input: -stderr: -stderr: - | (1 + #) = 1111 -./calc.at:1398: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y -stderr: +./calc.at:1416: mv calc.y.tmp calc.y + ./calc.at:1402: cat stderr -1.14: memory exhausted stderr: -1.6: syntax error: invalid character: '#' -1.2: syntax error: invalid character: '#' -./calc.at:1403: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90897,35 +91083,58 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: $PREPARSER ./calc /dev/null +1.11-17: error: null divisor +./calc.at:1401: cat stderr +./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +506. calc.at:1394: ok +stderr: +input: +522. calc.at:1426: testing Calculator lalr1.cc %header ... +./calc.at:1426: mv calc.y.tmp calc.y + +input: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1402: $PREPARSER ./calc input stderr: stderr: -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1400: cat stderr -1.1: syntax error, unexpected end of file + | 1 + 2 * 3 + !* ++ +./calc.at:1401: $PREPARSER ./calc input +stderr: +1.11-17: error: null divisor +./calc.at:1397: cat stderr +2.1: syntax error, unexpected '+' +./calc.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y +./calc.at:1426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: cat stderr +1.14: memory exhausted +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1403: cat stderr +1.14: memory exhausted +input: + + | 1 2 +./calc.at:1403: $PREPARSER ./calc input +input: stderr: -1.1: syntax error, unexpected end of file -stdout: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1397: $PREPARSER ./calc input +2.1: syntax error, unexpected '+' +./calc.at:1400: cat stderr +1.3: syntax error, unexpected number +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1401: cat stderr +1.11-17: error: null divisor +./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +510. calc.at:1400: ok +1.3: syntax error, unexpected number input: - | (1 + # + 1) = 1111 -./calc.at:1403: $PREPARSER ./calc /dev/null -./calc.at:1400: $PREPARSER ./calc input -./types.at:139: $PREPARSER ./test +stderr: ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -90936,16 +91145,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (#) + (#) = 2222 +1.11-17: error: null divisor +./calc.at:1401: $PREPARSER ./calc input stderr: -stderr: -1.6: syntax error: invalid character: '#' -1.1: syntax error, unexpected end of input -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1402: cat stderr -stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90955,35 +91161,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stderr: -1.1: syntax error, unexpected end of input -./calc.at:1397: cat stderr -1.6: syntax error: invalid character: '#' - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: cat stderr -./calc.at:1401: $PREPARSER ./calc input -input: -./calc.at:1413: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1402: $PREPARSER ./calc input -input: -./calc.at:1400: cat stderr - | (# + 1) = 1111 -451. 
types.at:139: ok -./calc.at:1398: $PREPARSER ./calc input -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 ./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: cat stderr + stderr: -stderr: -stderr: -./calc.at:1403: "$PERL" -pi -e 'use strict; +./calc.at:1426: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1397: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -90994,31 +91177,14 @@ }eg ' expout || exit 77 1.2: syntax error: invalid character: '#' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -input: - | (#) + (#) = 2222 -./calc.at:1403: cat stderr -stdout: +1.8: syntax error: invalid character: '#' +./calc.at:1402: $PREPARSER ./calc /dev/null stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1397: $PREPARSER ./calc input -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 +./calc.at:1403: cat stderr stderr: - | (1 + 1) / (1 - 1) -./calc.at:1400: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +stdout: +1.1: syntax error, unexpected end of file +./calc.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS ./calc.at:1405: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -91030,23 +91196,22 @@ || /\t/ )' calc.c calc.h -stderr: -stderr: -1.11-17: error: null divisor -./calc.at:1400: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -stderr: +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1401: cat stderr +./calc.at:1397: cat stderr +input: stderr: -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 + | 1//2 +523. calc.at:1431: testing Calculator C++ ... 
+./calc.at:1431: mv calc.y.tmp calc.y + +./calc.at:1403: $PREPARSER ./calc input input: -1.11-17: error: null divisor + | (1 + #) = 1111 +1.1: syntax error, unexpected end of file +input: +./calc.at:1401: $PREPARSER ./calc input +stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -91060,10 +91225,12 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +./calc.at:1431: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1405: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1403: $PREPARSER ./calc input -./calc.at:1398: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91074,32 +91241,16 @@ }eg ' expout || exit 77 stderr: - -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1398: cat stderr -stderr: -input: - | (1 + # + 1) = 1111 -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +508. calc.at:1397: ok stderr: 1.6: syntax error: invalid character: '#' -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1400: cat stderr -input: +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (!!) + (1 2) = 1 +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +524. calc.at:1432: testing Calculator C++ %locations ... stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1402: "$PERL" -pi -e 'use strict; +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91109,25 +91260,57 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.6: syntax error: invalid character: '#' +./calc.at:1432: mv calc.y.tmp calc.y + +./calc.at:1403: cat stderr +./calc.at:1402: cat stderr +input: +./calc.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | 1 2 +./calc.at:1405: $PREPARSER ./calc input +stderr: +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +input: + | error +input: +stderr: +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1401: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1402: $PREPARSER ./calc input +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +stderr: +1.1: syntax error, unexpected invalid token +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +./calc.at:1431: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1405: cat stderr +1.1: syntax error, unexpected invalid token +stderr: + | (# + 1) = 1111 ./calc.at:1401: $PREPARSER ./calc input -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: 1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 1.1-46: error: 4444 != 1 -510. calc.at:1400: ok +1.2: syntax error: invalid character: '#' +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1398: "$PERL" -pi -e 'use strict; +input: +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91137,13 +91320,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -input: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: cat stderr -./calc.at:1403: "$PERL" -pi -e 'use strict; +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91153,55 +91330,65 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 + | 1//2 ./calc.at:1405: $PREPARSER ./calc input -./calc.at:1398: cat stderr +1.2: syntax error: invalid character: '#' stderr: -521. calc.at:1416: testing Calculator %glr-parser %no-lines api.pure parse.error=verbose %debug %locations %header api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1416: mv calc.y.tmp calc.y - -stdout: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: cat stderr +./calc.at:1432: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1401: cat stderr +./calc.at:1403: cat stderr stderr: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +525. calc.at:1433: testing Calculator C++ %locations $NO_EXCEPTIONS_CXXFLAGS ... 
+./calc.at:1405: cat stderr input: -./calc.at:1403: cat stderr -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./types.at:139: $PREPARSER ./test +./calc.at:1433: mv calc.y.tmp calc.y + input: - | (1 + 1) / (1 - 1) -./calc.at:1398: $PREPARSER ./calc input -./calc.at:1397: cat stderr -stderr: +input: + | (1 + # + 1) = 1111 +./calc.at:1401: $PREPARSER ./calc input | (!!) + (1 2) = 1 stderr: -1.11-17: error: null divisor +input: +1.6: syntax error: invalid character: '#' ./calc.at:1402: $PREPARSER ./calc input -./calc.at:1398: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + | error +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.c calc.y + | 1 = 2 = 3 stderr: -1.11-17: error: null divisor +1.6: syntax error: invalid character: '#' +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 +stderr: ./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -input: +./calc.at:1401: cat stderr +1.7: syntax error, unexpected '=' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (1 + #) = 1111 1.11: syntax error, unexpected number 1.1-16: error: 2222 != 1 -448. types.at:139: ok -./calc.at:1397: $PREPARSER ./calc input input: - | (!!) + (1 2) = 1 -./calc.at:1403: $PREPARSER ./calc input -./calc.at:1398: "$PERL" -pi -e 'use strict; + | (1 + 1) / (1 - 1) +./calc.at:1401: $PREPARSER ./calc input +stderr: +stderr: +1.7: syntax error, unexpected '=' +stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +1.11-17: error: null divisor +./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -91212,16 +91399,6 @@ }eg ' expout || exit 77 stderr: -./calc.at:1405: cat stderr -stderr: -./calc.at:1398: cat stderr -./calc.at:1401: cat stderr -1.6: syntax error: invalid character: '#' -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -91233,16 +91410,10 @@ }eg ' expout || exit 77 stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -input: -stdout: -stderr: -509. 
calc.at:1398: ok -1.6: syntax error: invalid character: '#' - | 1//2 -./calc.at:1405: $PREPARSER ./calc input +1.11-17: error: null divisor ./calc.at:1402: cat stderr +stdout: +./calc.at:1403: cat stderr ./calc.at:1407: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -91254,37 +91425,10 @@ || /\t/ )' calc.c -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | (- *) + (1 2) = 1 - -./calc.at:1403: cat stderr +./calc.at:1401: cat stderr +./calc.at:1405: cat stderr input: input: -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -91298,25 +91442,20 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1407: $PREPARSER ./calc input -522. calc.at:1426: testing Calculator lalr1.cc %header ... | (- *) + (1 2) = 1 +./calc.at:1407: $PREPARSER ./calc input ./calc.at:1402: $PREPARSER ./calc input -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: - | (- *) + (1 2) = 1 -./calc.at:1405: cat stderr +511. calc.at:1401: ok + | + | +1 ./calc.at:1403: $PREPARSER ./calc input stderr: -./calc.at:1426: mv calc.y.tmp calc.y - stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -92153,33 +92292,21 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o calc calc.c calc-lex.c calc-main.c $LIBS -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: + | 1 = 2 = 3 +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: 1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
1.12: syntax error, unexpected number 1.1-17: error: 2222 != 1 - | error -./calc.at:1405: $PREPARSER ./calc input stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 +2.1: syntax error, unexpected '+' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -92945,2596 +93072,80 @@ Next token is token ')' (1.1: ) Reducing stack 0 by rule 12 (line 117): $1 = nterm exp (1.1: 2) - $2 = token '^' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 4) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 4) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 4) -Entering state 8 -Reading a token -Next token is token '^' (1.1: ) -Shifting token '^' (1.1: ) -Entering state 24 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 33 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 12 (line 117): - $1 = nterm exp (1.1: 4) - $2 = token '^' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 64) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 64) -Shifting token "number" (1.1: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 64) --> $$ = nterm exp (1.1: 64) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 64) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 64) --> $$ = nterm exp (1.1: 64) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 64) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm input (1.1: ) - $2 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1397: cat stderr -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -./calc.at:1426: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | 1 2 -./calc.at:1407: $PREPARSER ./calc input - | (# + 1) = 1111 -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: cat stderr -./calc.at:1401: cat stderr -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.1: 2) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -523. calc.at:1431: testing Calculator C++ ... -./calc.at:1431: mv calc.y.tmp calc.y - -./calc.at:1431: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1402: cat stderr -stderr: -input: -input: - | (* *) + (*) + (*) -./calc.at:1401: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1403: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1405: cat stderr -stderr: -524. calc.at:1432: testing Calculator C++ %locations ... -./calc.at:1432: mv calc.y.tmp calc.y - -./calc.at:1432: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: -stderr: -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.1: 2) - | (* *) + (*) + (*) -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: $PREPARSER ./calc input -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | 1 = 2 = 3 -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1405: $PREPARSER ./calc input -./calc.at:1397: cat stderr -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -stderr: -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -input: - | (1 + # + 1) = 1111 -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1397: $PREPARSER ./calc input -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1403: cat stderr -1.6: syntax error: invalid character: '#' -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1401: cat stderr -./calc.at:1402: cat stderr -./calc.at:1432: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1407: cat stderr -stderr: -input: -input: -1.6: syntax error: invalid character: '#' -./calc.at:1405: cat stderr - | 1 + 2 * 3 + !+ ++ -input: -./calc.at:1402: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1403: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1401: $PREPARSER ./calc input -stderr: -stderr: -stderr: -input: -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1//2 -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: - | - | +1 -stderr: -./calc.at:1405: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Reading a token -Next token is token '/' (1.1: ) -syntax error -Error: popping token '/' (1.1: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.1: ) -stderr: -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -input: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1397: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1403: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -stderr: - | 1 + 2 * 3 + !- ++ -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Reading a token -Next token is token '/' (1.1: ) -syntax error -Error: popping token '/' (1.1: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.1: ) -./calc.at:1402: $PREPARSER ./calc input -./calc.at:1401: $PREPARSER ./calc input -stderr: -stderr: -stderr: -stderr: -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -2.1: 
syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1397: $PREPARSER ./calc input -stderr: -stderr: -stderr: -1.11-17: error: null divisor -stderr: -./calc.at:1397: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1405: cat stderr -stderr: -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11-17: error: null divisor -./calc.at:1405: $PREPARSER ./calc /dev/null -./calc.at:1401: cat stderr -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1403: cat stderr -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1407: cat stderr -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1397: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1402: cat stderr -input: -stderr: -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -input: - | 1 + 2 * 3 + !* ++ -./calc.at:1401: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !* ++ -stderr: -./calc.at:1403: $PREPARSER ./calc input - | error -1.14: memory exhausted -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -./calc.at:1397: cat stderr -1.14: memory exhausted - | 1 + 2 * 3 + !* ++ -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: cat stderr -stderr: -stderr: -1.14: memory exhausted -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.14: memory exhausted -stderr: -stderr: -508. 
calc.at:1397: ok -1.14: memory exhausted -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -syntax error -Cleanup: discarding lookahead token "invalid token" (1.1: ) -1.14: memory exhausted - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1405: $PREPARSER ./calc input -./calc.at:1401: cat stderr -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - - | (#) + (#) = 2222 -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1403: cat stderr -stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1402: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (#) + (#) = 2222 -stderr: -./calc.at:1403: $PREPARSER ./calc input -./calc.at:1405: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: -stderr: -./calc.at:1407: cat stderr - | (#) + (#) = 2222 -./calc.at:1402: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1401: cat stderr -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (!!) + (1 2) = 1 -input: -./calc.at:1405: $PREPARSER ./calc input -525. calc.at:1433: testing Calculator C++ %locations $NO_EXCEPTIONS_CXXFLAGS ... 
-1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1433: mv calc.y.tmp calc.y - -stderr: - | 1 = 2 = 3 -./calc.at:1407: $PREPARSER ./calc input -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.1: ) -syntax error -Error: popping nterm exp (1.1: 2) -Error: popping token '=' (1.1: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -stderr: -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -stderr: -stderr: -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.1: ) -syntax error -Error: popping nterm exp (1.1: 2) -Error: popping token '=' (1.1: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.1: ) -./calc.at:1405: cat stderr -./calc.at:1403: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1402: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1403: $PREPARSER ./calc input -input: -input: -stderr: - | (- *) + (1 2) = 1 -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1401: cat stderr -./calc.at:1402: $PREPARSER ./calc input -./calc.at:1405: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1433: $CXX $CPPFLAGS $CXXFLAGS $NO_EXCEPTIONS_CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -input: -./calc.at:1407: cat stderr -stderr: -stderr: - | (# + 1) = 1111 -./calc.at:1401: $PREPARSER ./calc input -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -1.6: syntax error: invalid character: '#' -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | - | +1 -./calc.at:1407: $PREPARSER ./calc input -stderr: -./calc.at:1405: cat stderr -./calc.at:1403: cat stderr -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Next token is token '+' (1.1: ) -syntax error -Error: popping nterm input (1.1: ) -Cleanup: discarding lookahead token '+' (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1402: cat stderr -./calc.at:1401: cat stderr -stderr: - | (* *) + (*) + (*) -./calc.at:1405: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 76): - $1 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Next token is token '+' (1.1: ) -syntax error -Error: popping nterm input (1.1: ) -Cleanup: discarding lookahead token '+' (1.1: ) - | (# + 1) = 1111 -./calc.at:1403: $PREPARSER ./calc input -stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -stderr: - | (1 + # + 1) = 1111 -1.2: syntax error: invalid character: '#' -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -./calc.at:1402: $PREPARSER ./calc input -stderr: -stderr: -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -stderr: -stderr: -./calc.at:1405: cat stderr -1.2: syntax error: invalid character: '#' -stderr: -stdout: -input: -./calc.at:1407: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1408: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - - | 1 + 2 * 3 + !+ ++ -./calc.at:1405: $PREPARSER ./calc input -./calc.at:1403: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1407: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -./calc.at:1403: cat stderr -./calc.at:1401: cat stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1408: $PREPARSER ./calc input -stderr: -input: -./calc.at:1402: cat stderr -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-syntax error -Cleanup: discarding lookahead token "end of input" (1.1: ) - | 1 + 2 * 3 + !- ++ -stderr: -./calc.at:1405: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Next token is token '=' 
(2.12: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (4.8: ) - $2 = nterm 
exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering 
state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Reading a token -Next token is 
token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (10.1-11: 
2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is 
token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 19 -Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 28 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (14.1: ) -Entering state 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -input: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1403: $PREPARSER ./calc input -input: -./calc.at:1401: $PREPARSER ./calc input -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 28 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 21 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 22 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) 
-Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 31 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 19 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 28 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 24 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 33 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 19 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 
-Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 28 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '1.6: syntax error: invalid character: '#' -(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 24 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 33 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 19 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 28 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token 
'\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 19 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 28 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 20 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 29 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = 
nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 20 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 29 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 19 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 123): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 28 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 20 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 20 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 29 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 29 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) 
-Shifting token '=' (10.13: ) -Entering state 19 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 28 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 24 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 33 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 24 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 33 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 33 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 19 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 28 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ 
= nterm input (1.1-13.0: ) -Entering state 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 24 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 33 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) + $2 = token '^' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 4) Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 4) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 4) Entering state 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) +Next token is token '^' (1.1: ) +Shifting token '^' (1.1: ) Entering state 24 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) Entering state 33 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 124): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 12 (line 117): + $1 = nterm exp (1.1: 4) + $2 = token '^' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 64) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token "number" (1.1: 64) +Shifting token "number" (1.1: 64) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 64) +-> $$ = nterm exp (1.1: 64) Entering state 28 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 64) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 64) +-> $$ = nterm exp (1.1: 64) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) Entering state 25 
-Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 64) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Reducing stack 0 by rule 2 (line 72): + $1 = nterm input (1.1: ) + $2 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token "end of input" (1.1: ) Entering state 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -1.11-17: error: null divisor +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) stderr: -./calc.at:1401: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1402: $PREPARSER ./calc input input: -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -95544,65 +93155,49 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +./calc.at:1433: $CXX $CPPFLAGS $CXXFLAGS $NO_EXCEPTIONS_CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | 1 2 +./calc.at:1407: $PREPARSER ./calc input stderr: -./calc.at:1408: $PREPARSER ./calc input -1.11-17: error: null divisor -stderr: -stderr: -1.6: syntax error: invalid character: '#' +2.1: syntax error, unexpected '+' +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) stderr: -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: cat stderr Starting parse Entering state 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): +Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number +Next token is token "number" (1.1: 2) +syntax error Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -stdout: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' +Cleanup: discarding lookahead token "number" (1.1: 2) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr stderr: -./calc.at:1409: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - Starting parse Entering state 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): +Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number +Next token is token "number" (1.1: 2) +syntax error Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -stderr: -./calc.at:1405: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1407: cat stderr +Cleanup: discarding lookahead token "number" (1.1: 2) +input: ./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -95613,26 +93208,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1401: cat stderr -input: + | (* *) + (*) + (*) +./calc.at:1402: $PREPARSER ./calc input +stderr: +./calc.at:1403: cat stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: $PREPARSER ./calc /dev/null +stderr: +1.1: syntax error, unexpected end of input +stderr: +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' input: - | 1 + 2 * 3 + !* ++ -./calc.at:1405: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1408: "$PERL" -pi -e 'use strict; + | +1 +./calc.at:1405: $PREPARSER ./calc input +stderr: +stderr: +1.1: syntax error, unexpected end of input +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -95642,15 +93241,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -511. calc.at:1401: ok -1.14: memory exhausted -./calc.at:1403: cat stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./calc.at:1402: "$PERL" -pi -e 'use strict; +./calc.at:1402: cat stderr +526. calc.at:1434: testing Calculator C++ %locations api.location.type={Span} ... +./calc.at:1434: mv calc.y.tmp calc.y + +./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -95660,7 +93257,34 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1411: "$PERL" -ne ' +./calc.at:1434: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +stderr: +./calc.at:1405: cat stderr +input: +./calc.at:1405: $PREPARSER ./calc /dev/null + | 1 + 2 * 3 + !+ ++ +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1403: cat stderr +stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: +stdout: +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: cat stderr +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -95672,8 +93296,99 @@ )' calc.c calc.h stderr: +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +stderr: input: -1.14: memory exhausted +input: +./calc.at:1405: cat stderr + | 1 + 2 * 3 + !- ++ +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1402: $PREPARSER ./calc input +./calc.at:1403: $PREPARSER ./calc input +stderr: +stderr: +input: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: $PREPARSER ./calc input +input: +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1407: $PREPARSER ./calc input +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.1-46: error: 4444 != 1 +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1409: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Reading a token +Next token is token '/' (1.1: ) +syntax error +Error: popping token '/' (1.1: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.1: ) +stderr: +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +./calc.at:1403: cat stderr Starting parse Entering state 0 Reading a token @@ -96510,306 +94225,45 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1408: cat stderr ./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1407: $PREPARSER ./calc input -input: -./calc.at:1402: cat stderr -input: - | (1 + 1) / (1 - 1) stderr: -./calc.at:1403: $PREPARSER ./calc input -input: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 3) -Shifting token error (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 
= token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Error: popping nterm exp (1.1: 2) -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1408: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1405: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) Entering state 23 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token '/' (1.1: ) +syntax error +Error: popping token '/' (1.1: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +Cleanup: discarding lookahead token '/' (1.1: ) +./calc.at:1405: cat stderr +input: stderr: +./calc.at:1402: cat stderr +./calc.at:1434: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | (!!) + (1 2) = 1 +./calc.at:1403: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -97646,263 +95100,284 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -stdout: -./calc.at:1411: $PREPARSER ./calc input -1.11-17: error: null divisor -./types.at:139: $PREPARSER ./test +input: +stderr: +input: + | 1 + 2 * 3 + !* ++ +./calc.at:1402: $PREPARSER ./calc input +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 + | (!!) + (1 2) = 1 ./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: $PREPARSER ./calc input input: +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 2 +./calc.at:1409: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): +Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 30 +Entering state 8 Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +1.14: memory exhausted +stderr: +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +stderr: +1.14: memory exhausted +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: cat stderr +Starting parse +Entering state 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): +Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 3) -Entering state 12 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token ')' (1.1: ) -syntax error -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 3) -Shifting token error (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +stderr: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1403: cat stderr +./calc.at:1402: cat stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1405: cat stderr +input: +input: + | (#) + (#) = 2222 + | (- *) + (1 2) = 1 +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1402: $PREPARSER ./calc input +stderr: +stderr: +input: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1407: $PREPARSER ./calc input +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1409: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '*' (1.1: ) +Next token is token "invalid token" (1.1: ) syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +input: +stderr: +stdout: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 + | (- *) + (1 2) = 1 +./calc.at:1405: $PREPARSER ./calc input +input: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "invalid token" (1.1: ) +syntax error +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1408: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + + | 1//2 +./calc.at:1409: $PREPARSER ./calc input +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +stderr: +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: cat stderr +Starting parse +Entering state 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): +Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 1) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Error: popping token '*' (1.1: ) -Error: popping nterm exp (1.1: 2) -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Entering state 8 Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 30 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 3333) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 4444) -Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1403: cat stderr +input: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +input: +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | (1 + #) = 1111 +./calc.at:1402: $PREPARSER ./calc input +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +input: +Starting parse +Entering state 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): +Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 4444) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 4444 != 1 --> $$ = nterm exp (1.1: 4444) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 4444) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) + | (* *) + (*) + (*) +./calc.at:1403: $PREPARSER ./calc input stderr: stderr: - +stderr: +1.6: syntax error: invalid character: '#' +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -97939,14 +95414,14 @@ Entering state 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -98021,20 +95496,20 @@ Entering state 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) Entering state 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -98057,7 +95532,7 @@ Entering state 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -98121,14 +95596,14 @@ Entering state 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -98150,7 +95625,7 @@ Entering state 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -98193,7 +95668,7 @@ Entering state 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -98201,8 +95676,9 @@ Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (5.1: ) +Reducing stack 0 by rule 13 (line 125): + $1 = token './calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) @@ -98221,7 +95697,7 @@ Entering state 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -98294,19 +95770,19 @@ Entering state 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 
by rule 11 (line 123): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -98328,7 +95804,7 @@ Entering state 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -98388,7 +95864,7 @@ Entering state 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -98407,7 +95883,7 @@ Entering state 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -98430,7 +95906,7 @@ Entering state 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -98493,7 +95969,7 @@ Entering state 29 Reading a token Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -98502,7 +95978,7 @@ Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -98510,7 +95986,7 @@ Entering state 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -98595,14 +96071,14 @@ Entering state 33 Reading a token Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) Entering state 33 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -98666,7 +96142,7 @@ Entering state 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -98675,7 +96151,7 @@ Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -98695,7 +96171,7 @@ Entering state 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -98739,47 +96215,25 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: 
-stderr: - | (1 + 1) / (1 - 1) -./calc.at:1402: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -input: - | (#) + (#) = 2222 -./calc.at:1405: $PREPARSER ./calc input -1.11-17: error: null divisor -stderr: - | 1 2 -1.11-17: error: null divisor -./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1409: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1407: cat stderr stderr: -./types.at:139: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: cat stderr Starting parse Entering state 0 Reading a token @@ -98816,14 +96270,14 @@ Entering state 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -98898,20 +96352,20 @@ Entering state 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 31 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 101): +Reducing stack 0 by rule 9 (line 107): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) -> $$ = nterm exp (2.5-10: -6) Entering state 30 Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 99): +Reducing stack 0 by rule 7 (line 105): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -98934,7 +96388,7 @@ Entering state 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) @@ -98998,14 +96452,14 @@ Entering state 33 Reading a token Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by 
rule 12 (line 124): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) -> $$ = nterm exp (4.2-4: 1) Entering state 10 Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -99027,7 +96481,7 @@ Entering state 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) @@ -99070,7 +96524,7 @@ Entering state 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -99078,9 +96532,8 @@ Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '('stderr: - (5.1: ) +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) @@ -99099,7 +96552,7 @@ Entering state 33 Reading a token Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -99172,19 +96625,19 @@ Entering state 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -99206,7 +96659,7 @@ Entering state 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) @@ -99266,7 +96719,7 @@ Entering state 29 Reading a token Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -99285,7 +96738,7 @@ Entering state 29 Reading a token Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -99308,7 +96761,7 @@ Entering state 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 111): +Reducing stack 0 by rule 11 (line 123): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) @@ -99371,7 +96824,7 @@ Entering state 29 Reading a token Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -99380,7 +96833,7 @@ Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (10.5: ) $2 = nterm exp 
(10.6-10: -1) $3 = token ')' (10.11: ) @@ -99388,7 +96841,7 @@ Entering state 29 Reading a token Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 100): +Reducing stack 0 by rule 8 (line 106): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -99473,14 +96926,14 @@ Entering state 33 Reading a token Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) -> $$ = nterm exp (12.3-5: 8) Entering state 33 Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -99544,7 +96997,7 @@ Entering state 33 Reading a token Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -99553,7 +97006,7 @@ Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -99573,7 +97026,7 @@ Entering state 33 Reading a token Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): +Reducing stack 0 by rule 12 (line 124): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -99617,7 +97070,11 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1408: "$PERL" -pi -e 'use strict; +stderr: +input: +input: +1.6: syntax error: invalid character: '#' +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -99627,8 +97084,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (* *) + (*) + (*) +./calc.at:1405: $PREPARSER ./calc input + | 1 2 +./calc.at:1408: $PREPARSER ./calc input stderr: input: + | 1 = 2 = 3 +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -99644,8 +97112,10 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1403: "$PERL" -pi -e 'use strict; +input: +./calc.at:1402: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -99655,30 +97125,106 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: cat stderr +./calc.at:1403: cat stderr +stderr: + | error +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering 
state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Error: popping token '=' (1.1: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1409: $PREPARSER ./calc input +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) ./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -452. types.at:139: ok -1.11-17: error: null divisor - | 1 2 stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stdout: -./calc.at:1413: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h - -./calc.at:1411: $PREPARSER ./calc input input: +./calc.at:1402: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.1: ) +syntax error +Error: popping nterm exp (1.1: 2) +Error: popping token '=' (1.1: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.1: ) + | 1 + 2 * 3 + !+ ++ +./calc.at:1403: $PREPARSER ./calc input stderr: -./calc.at:1402: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1405: cat stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -99688,9 +97234,39 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +input: stderr: - | error -./calc.at:1408: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1402: $PREPARSER ./calc input +input: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1405: $PREPARSER ./calc input +stderr: +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1403: $PREPARSER ./calc input +./calc.at:1409: cat stderr +1.2: syntax error: invalid character: '#' ./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -99701,6 +97277,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +input: +1.2: syntax error: invalid character: '#' + | 1//2 +input: +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1405: $PREPARSER ./calc input +./calc.at:1408: $PREPARSER ./calc input +stderr: + | 1 = 2 = 3 +./calc.at:1409: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -99712,11 +97305,30 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1407: cat stderr +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: Starting parse Entering state 0 Reading a token @@ -99728,15 +97340,179 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) +Cleanup: discarding lookahead token '=' (1.7: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: cat stderr +input: + | + | +1 ./calc.at:1403: cat stderr +./calc.at:1407: $PREPARSER ./calc input +stderr: +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1408: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Cleanup: discarding lookahead token '+' (1.1: ) +input: +input: ./calc.at:1405: cat stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1402: $PREPARSER ./calc input + | 1 + 2 * 3 + !* ++ +stderr: +./calc.at:1403: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 76): + $1 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Next token is token '+' (1.1: ) +syntax error +Error: popping nterm input (1.1: ) +Cleanup: discarding lookahead token '+' (1.1: ) +1.14: memory exhausted +input: +input: +1.6: syntax error: invalid character: '#' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error + | 1 + 2 * 3 + !* ++ +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1405: $PREPARSER ./calc input +stderr: +stderr: +1.14: memory exhausted +./calc.at:1409: cat stderr +stderr: stderr: -./calc.at:1402: cat stderr -526. calc.at:1434: testing Calculator C++ %locations api.location.type={Span} ... 
Starting parse Entering state 0 Reading a token @@ -99744,8 +97520,32 @@ 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token "invalid token" (1.1: ) ./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +1.14: memory exhausted +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' stderr: +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.14: memory exhausted +input: +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -99753,25 +97553,1940 @@ Next token is token "invalid token" (1.1: ) 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1434: mv calc.y.tmp calc.y - + | + | +1 +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1403: cat stderr +stderr: +./calc.at:1407: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1402: cat stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1405: cat stderr +./calc.at:1407: $PREPARSER ./calc /dev/null +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +input: +./calc.at:1408: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1402: $PREPARSER ./calc input +stderr: + | (#) + (#) = 2222 +./calc.at:1403: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (#) + (#) = 2222 +1.11-17: error: null divisor +./calc.at:1402: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: $PREPARSER ./calc input +input: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: $PREPARSER ./calc input +1.11-17: error: null divisor +stderr: +stderr: +./calc.at:1409: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1402: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1402: cat stderr +./calc.at:1405: cat stderr +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: cat stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +512. 
calc.at:1402: ok +stderr: +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +input: +input: | (1 + #) = 1111 +./calc.at:1405: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1408: cat stderr +./calc.at:1403: $PREPARSER ./calc input +stderr: +stderr: +1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: cat stderr + +input: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +./calc.at:1407: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' + | + | +1 +./calc.at:1408: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1409: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp 
(1.1: 3) +Entering state 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 3) +Shifting token error (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Error: popping nterm exp (1.1: 2) +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' 
(1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +./calc.at:1409: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 3) +Entering state 12 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token ')' (1.1: ) +syntax error +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 3) +Shifting token error (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next 
token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 1) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Error: popping token '*' (1.1: ) +Error: popping nterm exp (1.1: 2) +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 3333) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 4444) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 4444 != 1 +-> $$ = nterm exp (1.1: 4444) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 4444) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +input: +./calc.at:1403: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1405: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +527. calc.at:1435: testing Calculator C++ %header %locations parse.error=verbose %name-prefix "calc" %verbose ... +1.2: syntax error: invalid character: '#' +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error: invalid character: '#' +input: +./calc.at:1408: cat stderr +./calc.at:1435: mv calc.y.tmp calc.y + +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 +./calc.at:1403: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: cat stderr +./calc.at:1435: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1407: cat stderr +./calc.at:1408: $PREPARSER ./calc /dev/null +./calc.at:1409: cat stderr +input: +stderr: +stderr: +input: + | (!!) + (1 2) = 1 +./calc.at:1407: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) + | (1 + # + 1) = 1111 +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) Entering state 8 +Next token is token '\n' (1.1: ) +Shifting 
token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1405: $PREPARSER ./calc input +input: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1409: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 121): + $1 = token '!' (1.1: ) + $2 = token '!' (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Reading a token +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) +Shifting token error (1.1: ) +Entering state 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) 
+-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +stderr: +stderr: +stderr: +1.6: syntax error: invalid character: '#' +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 
4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stdout: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1403: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = 
nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.6: syntax error: invalid character: '#' +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: cat stderr +input: +input: + | (- *) + (1 2) = 1 +./calc.at:1408: cat stderr +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + # + 1) = 1111 +./calc.at:1403: $PREPARSER ./calc input input: +stderr: +./calc.at:1405: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -99785,15 +99500,246 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1413: $PREPARSER ./calc input -512. calc.at:1402: ok -./calc.at:1405: $PREPARSER ./calc input stderr: -513. 
calc.at:1403: ok +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.6: syntax error: invalid character: '#' -./calc.at:1407: cat stderr -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1409: cat stderr +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 2 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 120): + $1 = token '-' (1.1: ) + $2 = token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.1: 2) +syntax error +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token "number" (1.1: 2) +Error: discarding token "number" (1.1: 2) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1111) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1) +error: 2222 != 1 +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 
71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +input: stderr: Starting parse Entering state 0 @@ -100616,63 +100562,282 @@ Shifting token '\n' (13.13-14.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1435: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1408: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +stderr: + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1405: $PREPARSER ./calc input +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: cat stderr -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -input: -./calc.at:1408: cat stderr - | (!!) 
+ (1 2) = 1 -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1409: cat stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - Starting parse Entering state 0 Reading a token @@ -101509,10 +101674,36 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -input: + | (- *) + (1 2) = 1 +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1409: $PREPARSER ./calc input +1.11-17: error: null divisor +./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - input: + | 1 2 +./calc.at:1403: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -101520,122 +101711,116 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' (1.1: ) -Shifting token error (1.1: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token ')' (1.1: ) +Next token is token ')' (1.13: ) Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = 
nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1405: cat stderr -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1411: $PREPARSER ./calc input - | 1 2 -input: - | 1 = 2 = 3 -./calc.at:1413: $PREPARSER ./calc input -input: - | 1//2 -./calc.at:1408: $PREPARSER ./calc input -stderr: +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -101647,295 +101832,379 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -./calc.at:1409: $PREPARSER ./calc input +Cleanup: discarding lookahead token "number" (1.3: 2) stderr: +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1403: cat stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -527. calc.at:1435: testing Calculator C++ %header %locations parse.error=verbose %name-prefix "calc" %verbose ... -Starting parse -Entering state 0 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -stderr: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: mv calc.y.tmp calc.y - -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 121): - $1 = token '!' (1.1: ) - $2 = token '!' (1.1: ) -Shifting token error (1.1: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token "number" (1.1: 2) -syntax error -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.33: 1) + $2 = 
token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' (1.1: ) +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +1.11-17: error: null divisor +stderr: +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (# + 1) = 1111 -./calc.at:1435: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1405: $PREPARSER ./calc input -Starting parse -Entering state 0 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -528. calc.at:1437: testing Calculator C++ %locations parse.error=verbose api.prefix={calc} %verbose ... 
-stderr: -stderr: -Starting parse -Entering state 0 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1437: mv calc.y.tmp calc.y - -./calc.at:1434: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -101951,22 +102220,9 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: cat stderr -stderr: -1.2: syntax error: invalid character: '#' ./calc.at:1407: cat stderr +input: +./calc.at:1405: cat stderr ./calc.at:1408: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -101987,7 +102243,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; + | (1 + 1) / (1 - 1) +./calc.at:1403: $PREPARSER ./calc input +input: +stderr: +./calc.at:1409: cat stderr +514. calc.at:1405: ok +./calc.at:1408: cat stderr + | (* *) + (*) + (*) +1.11-17: error: null divisor +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -101997,67 +102263,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -529. calc.at:1438: testing Calculator C++ %locations parse.error=verbose %debug %name-prefix "calc" %verbose ... 
-./calc.at:1409: cat stderr -./calc.at:1413: cat stderr -./calc.at:1438: mv calc.y.tmp calc.y - -./calc.at:1408: cat stderr -./calc.at:1405: cat stderr -input: -input: - | (- *) + (1 2) = 1 - | error -input: -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1411: $PREPARSER ./calc input -input: -./calc.at:1438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - | error -stderr: -./calc.at:1409: $PREPARSER ./calc input -input: -./calc.at:1435: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) - | 1//2 -./calc.at:1413: $PREPARSER ./calc input +./calc.at:1403: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) - | (1 + # + 1) = 1111 -Starting parse -Entering state 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Reading a token Next token is token '*' (1.1: ) syntax error Shifting token error (1.1: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Shifting token error (1.1: ) Entering state 11 Next token is token '*' (1.1: ) Error: discarding token '*' (1.1: ) Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token Next token is token ')' (1.1: ) Entering state 11 Next token is token ')' (1.1: ) @@ -102078,21 +102302,12 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.1: 2) +Next token is token '*' (1.1: ) syntax error -Error: popping nterm exp (1.1: 1) Shifting token error (1.1: ) Entering state 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) Reading a token Next token is token ')' (1.1: ) Entering state 11 @@ -102106,38 +102321,52 @@ -> $$ = nterm exp (1.1: 1111) Entering state 30 Reading a token -Next token is token '=' (1.1: ) +Next token is token '+' (1.1: ) Reducing stack 0 by rule 7 (line 98): $1 = nterm exp (1.1: 1111) $2 = token '+' (1.1: ) $3 = nterm exp (1.1: 1111) -> $$ = nterm exp (1.1: 2222) Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) 
+Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 Reading a token Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): +Reducing stack 0 by rule 7 (line 98): $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 3333) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -102151,56 +102380,276 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -./calc.at:1405: $PREPARSER ./calc input ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr +input: +input: + | (* *) + (*) + (*) +./calc.at:1409: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -input: -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor + | (!!) + (1 2) = 1 ./calc.at:1408: $PREPARSER ./calc input stderr: +stdout: Starting parse Entering state 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + stderr: +input: stderr: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 Starting parse Entering state 0 Reading a token @@ -102208,22 +102657,16 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 2 -Reading a token Next token is token '*' (1.1: ) syntax error Shifting token error (1.1: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 120): - $1 = token '-' (1.1: ) - $2 = token error (1.1: ) -Shifting token error (1.1: ) Entering state 11 Next token is token '*' (1.1: ) Error: discarding token '*' (1.1: ) Reading a token +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token Next token is token ')' (1.1: ) Entering state 11 Next token is token ')' (1.1: ) @@ -102244,21 +102687,12 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.1: 2) +Next token is token '*' (1.1: ) syntax error -Error: popping nterm exp (1.1: 1) Shifting token error (1.1: ) Entering state 11 -Next token is token "number" (1.1: 2) -Error: discarding token "number" (1.1: 2) +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) Reading a token Next token is token ')' (1.1: ) Entering state 11 @@ -102272,38 +102706,52 @@ -> $$ = nterm exp (1.1: 1111) Entering state 30 Reading a token -Next token is token '=' (1.1: ) +Next token is token '+' (1.1: ) Reducing stack 0 by rule 7 (line 98): $1 = nterm exp (1.1: 1111) $2 = token '+' (1.1: ) $3 = nterm exp (1.1: 1111) -> $$ = nterm exp (1.1: 2222) Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 28 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.1: ) +syntax error +Shifting token error (1.1: ) +Entering state 11 +Next token is token '*' (1.1: ) +Error: discarding token '*' (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 30 Reading a token Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): +Reducing stack 0 by rule 7 (line 98): $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1) -error: 2222 != 1 --> $$ = nterm exp (1.1: 2222) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 3333) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 3333) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -102317,78 +102765,107 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping 
nterm input (1.1: ) -stderr: +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1409: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -Starting parse -Entering state 0 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -Starting parse -Entering state 0 +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -102396,81 +102873,16 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1437: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: -./calc.at:1409: cat stderr -stdout: -./calc.at:1405: cat stderr - | 1 = 2 = 3 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.c calc.h -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -input: -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1403: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -102480,17 +102892,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: cat stderr -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1438: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1407: cat stderr input: -input: -stderr: -./calc.at:1408: cat stderr -stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -102504,7 +102906,9 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1414: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1409: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -102516,28 +102920,20 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) - | (1 + 1) / (1 - 1) -./calc.at:1405: $PREPARSER ./calc input +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +stderr: +stderr: +./calc.at:1403: cat stderr Starting parse Entering state 0 Reading a token @@ -102549,9 +102945,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -102559,16 +102955,162 @@ Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 129): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 128): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -input: -stderr: +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -103405,159 +103947,98 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1408: $PREPARSER ./calc /dev/null -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1407: $PREPARSER ./calc input - | error -./calc.at:1413: $PREPARSER ./calc input -1.11-17: error: null divisor stderr: -./calc.at:1405: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token '*' (1.1: ) -syntax error -Shifting token error (1.1: ) -Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 
26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 30 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 129): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +513. 
calc.at:1403: ok stderr: Starting parse Entering state 0 @@ -103570,9 +104051,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -103580,15 +104061,71 @@ Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 28 +Entering state 30 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1411: cat stderr +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 130): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -104369,63 +104906,2283 @@ -> $$ = nterm exp (13.1-5: 4) Entering state 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 24 +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 19 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 28 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (14.1: ) +Entering state 17 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +stderr: +input: +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 130): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | 1 2 +./calc.at:1414: $PREPARSER ./calc input +stderr: + +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +528. calc.at:1437: testing Calculator C++ %locations parse.error=verbose api.prefix={calc} %verbose ... 
+input: +./calc.at:1437: mv calc.y.tmp calc.y + +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) + | (- *) + (1 2) = 1 +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1407: cat stderr +./calc.at:1409: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' 
(1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | error +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 127): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 
89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1414: cat stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1409: $PREPARSER ./calc input +input: +stderr: + | 1 + 2 * 3 + !+ ++ +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1408: cat stderr + | 1//2 +./calc.at:1414: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 122): + $1 = token '!' (1.1: ) + $2 = token '+' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | (* *) + (*) + (*) +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1409: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 123): + $1 = token '!' (1.1: ) + $2 = token '-' (1.1: ) +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +stderr: + | (#) + (#) = 2222 +./calc.at:1409: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1437: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1414: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +529. calc.at:1438: testing Calculator C++ %locations parse.error=verbose %debug %name-prefix "calc" %verbose ... 
+./calc.at:1438: mv calc.y.tmp calc.y + +./calc.at:1438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1411: cat stderr +input: + | error +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1 = 2 = 3 +stderr: +./calc.at:1411: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1408: cat stderr +./calc.at:1407: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +input: +stderr: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1409: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) + | 1 + 2 * 3 + !* ++ +stderr: +./calc.at:1407: $PREPARSER ./calc input +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 129): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 124): + $1 = token '!' (1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) +stderr: +./calc.at:1411: cat stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 129): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +stderr: +./calc.at:1414: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 2) +Shifting token "number" (1.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2) +-> $$ = nterm exp (1.1: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 22 +Reading a token +Next token is token "number" (1.1: 3) +Shifting token "number" (1.1: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 3) +-> $$ = nterm exp (1.1: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 9 (line 100): + $1 = nterm exp (1.1: 2) + $2 = token '*' (1.1: ) + $3 = nterm exp (1.1: 3) +-> $$ = nterm exp (1.1: 6) +Entering state 30 +Next token is token '+' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 6) +-> $$ = nterm exp (1.1: 7) +Entering state 8 +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token '!' (1.1: ) +Shifting token '!' (1.1: ) +Entering state 5 +Reading a token +Next token is token '*' (1.1: ) +Shifting token '*' (1.1: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 124): + $1 = token '!' 
(1.1: ) + $2 = token '*' (1.1: ) +memory exhausted +Cleanup: popping token '+' (1.1: ) +Cleanup: popping nterm exp (1.1: 7) + | 1 + 2 * 3 + !- ++ +input: +./calc.at:1408: $PREPARSER ./calc input +input: + | (1 + #) = 1111 +./calc.at:1409: $PREPARSER ./calc input +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 130): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | 1 = 2 = 3 +./calc.at:1414: $PREPARSER ./calc input + | + | +1 +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 130): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1438: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 33 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 112): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 18 -Reducing stack 0 by rule 2 (line 79): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (14.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: cat stderr +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr +./calc.at:1409: cat stderr +input: +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: cat stderr + | (#) + (#) = 2222 +./calc.at:1407: $PREPARSER ./calc input +input: +input: +stderr: +./calc.at:1411: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1409: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -104433,15 +107190,12 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.1: ) -syntax error +syntax error: invalid character: '#' +Next token is token error (1.1: ) Shifting token error (1.1: ) Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) -Reading a token -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) Reading a token Next token is token ')' (1.1: ) Entering state 11 @@ -104463,12 +107217,12 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.1: ) -syntax error +syntax error: invalid character: '#' +Next token is token error (1.1: ) Shifting token error (1.1: ) Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) Reading a token Next token is token ')' (1.1: ) Entering state 11 @@ -104482,13 +107236,105 @@ -> $$ = nterm exp (1.1: 1111) Entering state 30 Reading a token -Next token is token '+' (1.1: ) +Next token is token '=' (1.1: ) Reducing stack 0 by rule 7 (line 98): $1 = nterm exp (1.1: 1111) $2 = token '+' (1.1: ) $3 = nterm exp (1.1: 1111) -> $$ = nterm exp (1.1: 2222) Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2222) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) + | 1 + 2 * 3 + !* ++ +./calc.at:1408: $PREPARSER ./calc input + | + | +1 +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: $PREPARSER ./calc input +stderr: +./calc.at:1411: $PREPARSER ./calc /dev/null +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 @@ -104497,12 +107343,12 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.1: ) -syntax error +syntax error: invalid character: '#' +Next token is token error (1.1: ) Shifting token error (1.1: ) Entering state 11 -Next token is token '*' (1.1: ) -Error: discarding token '*' (1.1: ) +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) Reading a token Next token is token ')' (1.1: ) Entering state 11 @@ -104516,18 +107362,37 @@ -> $$ = nterm exp (1.1: 1111) Entering state 30 Reading a token -Next token is token '\n' (1.1: ) +Next token is token '=' (1.1: ) Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 1111) $2 = token '+' (1.1: ) $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 3333) +-> $$ = nterm exp (1.1: 2222) +Entering state 8 +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 2222) +Shifting token "number" (1.1: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 2222) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 2222) +-> $$ = nterm exp (1.1: 2222) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 3333) + $1 = nterm exp (1.1: 2222) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -104541,63 +107406,6 @@ Entering state 17 Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) -1.11-17: error: null divisor -stderr: -Starting parse -Entering state 0 
-Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -input: -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1405: cat stderr -input: - | 1 2 -./calc.at:1416: $PREPARSER ./calc input - | - | +1 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1409: cat stderr -stderr: -514. calc.at:1405: ok -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -104609,10 +107417,63 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: Starting parse @@ -104634,62 +107495,64 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: cat stderr stderr: -input: - | - | +1 -./calc.at:1409: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1414: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.c calc.h - -stderr: -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - -Starting parse -Entering state 0 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -104697,19 +107560,84 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -input: +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -104717,16 +107645,121 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1407: cat stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -input: -./calc.at:1408: $PREPARSER ./calc input -./calc.at:1416: "$PERL" -pi -e 'use strict; +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 107): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 131): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: cat stderr +stderr: +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -104736,6 +107769,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1411: cat stderr +stdout: +./calc.at:1413: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +./calc.at:1408: cat stderr +./calc.at:1414: $PREPARSER ./calc /dev/null +input: +./calc.at:1409: cat stderr +./calc.at:1407: cat stderr +input: +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -104749,29 +107808,15 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1413: cat stderr -./calc.at:1414: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1413: $PREPARSER ./calc input +input: +./calc.at:1411: $PREPARSER ./calc input +input: +input: + | (#) + (#) = 2222 stderr: +./calc.at:1408: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -104786,7 +107831,7 @@ Next token is token ')' (1.2: ) Shifting token ')' (1.2: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.2: ) @@ -104822,7 +107867,7 @@ Entering state 30 Reading a token Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.7: 1) $2 = token '+' (1.9: ) $3 = nterm exp (1.11: 1) @@ -104841,7 +107886,7 @@ Entering state 30 Reading a token Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.7-11: 2) $2 = token '+' (1.13: ) $3 = nterm exp (1.15: 1) @@ -104860,7 +107905,7 @@ Next token is token ')' (1.18: ) Shifting token ')' (1.18: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.6: ) $2 = token error (1.7-18: ) $3 = token ')' (1.18: ) @@ -104868,7 +107913,7 @@ Entering state 30 Reading a token Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-2: 1111) $2 = token '+' (1.4: ) $3 = nterm exp (1.6-18: 1111) @@ -104900,7 +107945,7 @@ Next token is token ')' (1.28: ) Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.22: ) $2 = token error (1.23-27: ) $3 = token ')' (1.28: ) @@ -104908,7 +107953,7 @@ Entering state 30 Reading a token 
Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-18: 2222) $2 = token '+' (1.20: ) $3 = nterm exp (1.22-28: 1111) @@ -104943,7 +107988,7 @@ Entering state 31 Reading a token Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.33: 1) $2 = token '*' (1.35: ) $3 = nterm exp (1.37: 2) @@ -104967,7 +108012,7 @@ Next token is token ')' (1.42: ) Shifting token ')' (1.42: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.32: ) $2 = token error (1.33-41: ) $3 = token ')' (1.42: ) @@ -104975,7 +108020,7 @@ Entering state 30 Reading a token Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-28: 3333) $2 = token '+' (1.30: ) $3 = nterm exp (1.32-42: 1111) @@ -105019,20 +108064,14 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +stderr: + | (1 + # + 1) = 1111 +./calc.at:1407: $PREPARSER ./calc input stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: cat stderr -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: Starting parse Entering state 0 Reading a token @@ -105869,94 +108908,6 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) - | 1 + 2 * 3 + !+ ++ -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: cat stderr -stderr: -input: -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | 1 = 2 = 3 -530. calc.at:1440: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} %verbose ... 
-./calc.at:1413: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: $PREPARSER ./calc /dev/null -stderr: Starting parse Entering state 0 Reading a token @@ -105964,234 +108915,84 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) Shifting token error (1.2: ) Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token ')' (1.42: ) +Next token is token ')' (1.9: ) Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Reading a token -Next token is token '=' (1.44: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -106204,42 +109005,97 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: mv calc.y.tmp calc.y - -input: - | 1//2 +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1409: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) stderr: -./calc.at:1409: cat stderr -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): +Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) Entering state 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -107076,1492 +109932,6 @@ Entering state 17 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -input: -stderr: -stderr: -./calc.at:1409: $PREPARSER ./calc /dev/null -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1414: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 122): - $1 = token '!' (1.1: ) - $2 = token '+' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -stderr: -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) - | 1 + 2 * 3 + !- ++ -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: cat stderr -stderr: -stderr: -./calc.at:1407: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is 
token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -input: -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (!!) + (1 2) = 1 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1431: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1408: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 123): - $1 = token '!' (1.1: ) - $2 = token '-' (1.1: ) -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1413: cat stderr -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1411: cat stderr -input: -stderr: -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1409: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1414: cat stderr -input: -./calc.at:1416: cat stderr -stderr: -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | - | +1 -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) - | 1//2 -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1408: cat stderr -stderr: -stderr: - | error -./calc.at:1416: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -108604,1475 +109974,199 @@ Entering state 21 Reading a token Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -stderr: - | (- *) + (1 2) = 1 - | 1 2 -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1408: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1407: cat stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -./calc.at:1409: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1407: $PREPARSER ./calc input -syntax error -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' 
(1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 128): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -./calc.at:1411: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 23 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 2) -Shifting token "number" (1.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2) --> $$ = nterm exp (1.1: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 22 -Reading a token -Next token is token "number" (1.1: 3) -Shifting token "number" (1.1: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 3) --> $$ = nterm exp (1.1: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 9 (line 100): - $1 = nterm exp (1.1: 2) - $2 = token '*' (1.1: ) - $3 = nterm exp (1.1: 3) --> $$ = nterm exp (1.1: 6) -Entering state 30 -Next token is token '+' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 6) --> $$ = nterm exp (1.1: 7) -Entering state 8 -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token '!' (1.1: ) -Shifting token '!' (1.1: ) -Entering state 5 -Reading a token -Next token is token '*' (1.1: ) -Shifting token '*' (1.1: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 124): - $1 = token '!' (1.1: ) - $2 = token '*' (1.1: ) -memory exhausted -Cleanup: popping token '+' (1.1: ) -Cleanup: popping nterm exp (1.1: 7) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (!!) + (1 2) = 1 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1413: cat stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 
-Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1416: cat stderr -./calc.at:1414: cat stderr -input: - | (- *) + (1 2) = 1 -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1408: cat stderr -stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: 
) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1431: cat stderr -Starting parse -Entering state 0 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -input: -./calc.at:1407: cat stderr - | error -stderr: -./calc.at:1414: $PREPARSER ./calc input -Starting parse -Entering state 0 +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 127): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token ')' (1.5: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' (1.13: ) +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -110085,104 +110179,30 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: cat stderr - | 1 = 2 = 3 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: stderr: -./calc.at:1416: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -input: - | (#) + (#) = 2222 -input: -./calc.at:1407: $PREPARSER ./calc input -stderr: - | (* *) + (*) + (*) - | 1//2 -./calc.at:1431: $PREPARSER ./calc input -Starting parse -Entering state 0 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): +Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1408: $PREPARSER ./calc input -stderr: -./calc.at:1413: cat stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' (1.1: ) Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token syntax error: invalid character: '#' Next token is token error (1.1: ) +Error: popping token '+' (1.1: ) +Error: popping nterm exp (1.1: 1) Shifting token error (1.1: ) Entering state 11 Next token is token error (1.1: ) @@ -110198,39 +110218,32 @@ $2 = token error (1.1: ) $3 = token ')' (1.1: ) -> $$ = nterm exp (1.1: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) Entering state 8 +Reading a token Next token is token '=' (1.1: ) Shifting token '=' (1.1: ) Entering state 19 Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering 
state 28 Reading a token Next token is token '\n' (1.1: ) Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 1111) $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) Entering state 8 Next token is token '\n' (1.1: ) Shifting token '\n' (1.1: ) Entering state 25 Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) + $1 = nterm exp (1.1: 1111) $2 = token '\n' (1.1: ) -> $$ = nterm line (1.1: ) Entering state 7 @@ -110245,18 +110258,6 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) stderr: -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: Starting parse Entering state 0 Reading a token @@ -110264,42 +110265,32 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token ')' (1.5: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) Reading a token Next token is token ')' (1.11: ) Entering state 11 @@ -110307,59 +110298,37 @@ Shifting token ')' (1.11: ) Entering state 26 Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -110372,11 +110341,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -syntax error -stderr: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -110384,110 +110349,131 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) +Next token 
is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.1: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): +Reducing stack 0 by rule 14 (line 126): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token ')' (1.1: ) +Next token is token ')' (1.9: ) Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 30 Reading a token -Next token is token '=' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1111) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 2222) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.1: 2222) -Shifting token "number" (1.1: 2222) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 2222) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 2222) --> $$ = nterm exp (1.1: 2222) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2222) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) +Reducing 
stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) +Shifting token "end of input" (2.1: ) Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1411: $PREPARSER ./calc input +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | 1 2 +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1413: $PREPARSER ./calc input +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: cat stderr Starting parse Entering state 0 Reading a token @@ -110495,103 +110481,70 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token ')' (1.5: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Error: discarding token "number" (1.10: 1) Reading a token -Next token is token ')' (1.13: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' 
(1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -110604,9 +110557,35 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: cat stderr stderr: -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +./calc.at:1409: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -110616,10 +110595,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +input: stderr: -./calc.at:1409: cat stderr +./calc.at:1408: $PREPARSER ./calc input +input: + | (!!) + (1 2) = 1 +./calc.at:1411: $PREPARSER ./calc input +stderr: +./calc.at:1407: cat stderr stderr: +./calc.at:1409: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: +./calc.at:1414: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -110627,102 +110633,176 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token ')' (1.5: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ 
= nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.17: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -110735,7 +110815,21 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stdout: stderr: +./calc.at:1431: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + Starting parse Entering state 0 Reading a token @@ -110743,103 +110837,98 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' (1.13: ) +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Reading a token -Next token is token '=' (1.15: ) +Next token is token '=' (1.14: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (1.14: ) 
+Shifting token '=' (1.14: ) Entering state 19 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -110852,35 +110941,90 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | (# + 1) = 1111 +input: +stderr: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1409: $PREPARSER ./calc input +./calc.at:1407: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) +Next token is token '\n' (1.15-2.0: ) +Reducing 
stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -111128,13 +111272,196 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: cat stderr -input: -syntax error stderr: -stdout: - | (* *) + (*) + (*) -./calc.at:1411: "$PERL" -pi -e 'use strict; +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = 
token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111144,9 +111471,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111156,28 +111496,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - stderr: -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1413: cat stderr stderr: Starting parse Entering state 0 @@ -111186,101 +111506,101 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 Reading a token -Next token is token '+' (1.13: ) +Next token is token ')' (1.7: ) Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 Reading a token Next token is token ')' (1.17: ) -Entering state 11 +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing 
stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) + $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -111294,20 +111614,80 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error (1.1: ) +Shifting token error (1.1: ) +Entering state 11 +Next token is token error (1.1: ) +Error: discarding token error (1.1: ) +Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token +Next token is token ')' (1.1: ) +Entering state 11 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 119): + $1 = token '(' (1.1: ) + $2 = token error (1.1: ) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.1: ) +Shifting token '=' (1.1: ) +Entering state 19 +Reading a token +Next token is token "number" (1.1: 1111) +Shifting token "number" (1.1: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 6 (line 82): + $1 = nterm exp (1.1: 1111) + $2 = token '=' (1.1: ) + $3 = nterm exp (1.1: 1111) +-> $$ = nterm exp (1.1: 1111) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 1111) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) ./calc.at:1411: cat stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1408: cat stderr +stderr: Starting parse Entering state 0 Reading a token @@ -111555,23 +111935,36 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1409: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr input: - | 1 = 2 = 3 -./calc.at:1414: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1432: $PREPARSER ./calc input +input: + | 1//2 +./calc.at:1413: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1408: cat stderr +./calc.at:1411: $PREPARSER ./calc input +stderr: +./calc.at:1409: cat stderr stderr: Starting parse Entering state 0 @@ -111580,14 +111973,20 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) Reading a token Next token is token ')' (1.5: ) @@ -111595,7 +111994,7 @@ Next token is token ')' (1.5: ) Shifting token ')' (1.5: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-4: ) $3 = token ')' (1.5: ) @@ -111610,71 +112009,66 @@ Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.13: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 105): +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) + $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -111688,7 +112082,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -111700,42 +112094,21 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -stderr: -./calc.at:1416: cat stderr +Cleanup: discarding lookahead token '/' (1.3: ) input: - | (* *) + (*) + (*) -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1407: cat stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; + | (# + 1) = 1111 +./calc.at:1408: $PREPARSER ./calc input +input: +stderr: +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111745,7 +112118,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; + | 1 2 +./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -111755,45 +112129,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: cat stderr -stderr: -input: -input: -stderr: - | - | +1 -./calc.at:1416: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 19 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 28 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1413: cat stderr +./calc.at:1431: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -111801,14 +112137,20 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) Reading a token Next token is token ')' (1.5: ) @@ -111831,72 +112173,137 @@ Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.13: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Reading a token -Next token is token '+' (1.13: ) +Next token is token '=' (1.15: ) Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.17: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -111909,26 +112316,71 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 + !+ ++ -stderr: -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: $PREPARSER ./calc input -input: -./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (1 + #) = 1111 -./calc.at:1407: $PREPARSER ./calc input -stderr: -stderr: +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +517. 
calc.at:1409: ok stderr: +./calc.at:1414: cat stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error stderr: +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -111936,10 +112388,12 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: Starting parse Entering state 0 Reading a token @@ -111951,62 +112405,180 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1407: cat stderr +input: +stderr: +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (!!) + (1 2) = 1 +./calc.at:1411: cat stderr +syntax error + +./calc.at:1414: $PREPARSER ./calc input +input: + | (1 + # + 1) = 1111 +./calc.at:1408: cat stderr +input: +./calc.at:1407: $PREPARSER ./calc input +stderr: +./calc.at:1413: cat stderr + | (* *) + (*) + (*) +./calc.at:1411: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.16: 1) +Shifting token 
"number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (1 + # + 1) = 1111 +input: +./calc.at:1408: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -112122,7 +112694,9 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -112132,7 +112706,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: + | error +stderr: +./calc.at:1413: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -112161,6 +112737,12 @@ Next token is token error (1.1: ) Error: discarding token error (1.1: ) Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token Next token is token ')' (1.1: ) Entering state 11 Next token is token ')' (1.1: ) @@ -112211,160 +112793,6 @@ Cleanup: popping token "end of input" (1.1: ) Cleanup: popping nterm input (1.1: ) ./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) 
+ (1 2) = 1 - | 1 + 2 * 3 + !+ ++ -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1431: cat stderr -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | 1 2 -./calc.at:1414: cat stderr -input: Starting parse Entering state 0 Reading a token @@ -112476,7 +112904,299 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1432: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +stderr: +./calc.at:1431: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 126): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: stderr: Starting parse @@ -112507,6 +113227,12 @@ Next token is token error (1.1: ) Error: discarding token error (1.1: ) Reading a token +Next token is token '+' (1.1: ) +Error: discarding token '+' (1.1: ) +Reading a token +Next token is token "number" (1.1: 1) +Error: discarding token "number" (1.1: 1) +Reading a token Next token is token ')' (1.1: ) Entering state 11 Next token is token ')' (1.1: ) @@ -112559,179 +113285,100 @@ Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 129): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: cat stderr -stderr: +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1408: cat stderr input: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ - | error +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: cat stderr +stderr: + | 1//2 +./calc.at:1411: cat stderr +stdout: ./calc.at:1431: $PREPARSER ./calc input +./calc.at:1433: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +530. calc.at:1440: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} %verbose ... stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1440: mv calc.y.tmp calc.y + +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +./calc.at:1440: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y input: -./calc.at:1408: $PREPARSER ./calc input -1.3: syntax error - | 1 + 2 * 3 + !- ++ -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: $PREPARSER ./calc input - | - | +1 -stderr: + | (- *) + (1 2) = 1 ./calc.at:1414: $PREPARSER ./calc input -stderr: + | (1 + 1) / (1 - 1) input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' 
(1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1408: $PREPARSER ./calc input +./calc.at:1413: cat stderr stderr: +./calc.at:1407: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error | 1 + 2 * 3 + !+ ++ ./calc.at:1411: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -112768,14 +113415,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -112789,38 +113436,14 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -stderr: -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -112828,98 +113451,103 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.13: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.15: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 
--> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -112932,99 +113560,23 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1433: $PREPARSER ./calc input stderr: -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -113091,189 +113643,134 @@ Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) stderr: +input: Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 83): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -Starting parse -Entering state 0 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) +Next token is token ')' (1.7: ) Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -Starting parse -Entering state 0 +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 130): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1 = 2 = 3 +./calc.at:1407: cat stderr +./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: $PREPARSER ./calc input +input: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !- ++ ./calc.at:1411: $PREPARSER ./calc input -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: cat stderr -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: +stderr: +stderr: stderr: Starting parse Entering state 0 @@ -113340,41 +113837,6 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1407: cat stderr -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: cat stderr -./calc.at:1416: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -113386,9 +113848,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -113396,175 +113858,16 @@ Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -./calc.at:1409: cat stderr -input: -./calc.at:1431: cat stderr -./calc.at:1408: cat stderr -./calc.at:1414: $PREPARSER ./calc /dev/null -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) - | (# + 1) = 1111 -./calc.at:1407: $PREPARSER ./calc input -./calc.at:1413: cat stderr -stderr: -./calc.at:1432: cat stderr -input: -input: -stderr: - | 1 = 2 = 3 -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1431: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !* ++ -stderr: -./calc.at:1409: $PREPARSER ./calc input -input: -input: - | (- *) + (1 2) = 1 -syntax error -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1411: cat stderr +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) stderr: - | 1//2 -./calc.at:1432: $PREPARSER ./calc input +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -113572,138 +113875,124 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 105): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) 
+Entering state 23 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) - | 1 + 2 * 3 + !* ++ -stderr: -Starting parse -Entering state 0 +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 106): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 125): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 108): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): - $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1408: $PREPARSER ./calc input -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -113820,19 +114109,62 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -stderr: -stderr: -stderr: + | (1 + 1) / (1 - 1) +./calc.at:1407: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) stderr: -1.3: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -113870,14 +114202,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -113891,93 +114223,298 @@ Shifting token '!' 
(1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) +Entering state 21 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 30 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 8 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 +Reading a token Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -syntax error +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +./calc.at:1431: cat stderr +./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1408: cat stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 2 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1414: cat stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +516. calc.at:1408: ok +./calc.at:1413: cat stderr +stderr: +1.3: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +./calc.at:1411: cat stderr +input: + + | (* *) + (*) + (*) +stderr: +./calc.at:1414: $PREPARSER ./calc input +input: +1.3: syntax error Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): +Reducing stack 0 by rule 5 (line 81): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 8 +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.1: ) +Shifting token '+' (1.1: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 7 (line 98): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 2) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 2) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 2) +Entering state 8 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token '/' (1.1: ) +Shifting token '/' (1.1: ) +Entering state 23 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 
1) +-> $$ = nterm exp (1.1: 1) +Entering state 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Next token is token '-' (1.1: ) +Shifting token '-' (1.1: ) +Entering state 20 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 81): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.1: ) +Reducing stack 0 by rule 8 (line 99): $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 + $2 = token '-' (1.1: ) + $3 = nterm exp (1.1: 1) +-> $$ = nterm exp (1.1: 0) +Entering state 12 +Next token is token ')' (1.1: ) +Shifting token ')' (1.1: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 118): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.1: 0) + $3 = token ')' (1.1: ) +-> $$ = nterm exp (1.1: 0) +Entering state 32 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '\n' (1.1: ) +Reducing stack 0 by rule 10 (line 101): + $1 = nterm exp (1.1: 2) + $2 = token '/' (1.1: ) + $3 = nterm exp (1.1: 0) +error: null divisor +-> $$ = nterm exp (1.1: 2) +Entering state 8 +Next token is token '\n' (1.1: ) +Shifting token '\n' (1.1: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 77): + $1 = nterm exp (1.1: 2) + $2 = token '\n' (1.1: ) +-> $$ = nterm line (1.1: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 71): + $1 = nterm line (1.1: ) +-> $$ = nterm input (1.1: ) +Entering state 6 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 131): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Now at end of input. +Shifting token "end of input" (1.1: ) +Entering state 17 +Cleanup: popping token "end of input" (1.1: ) +Cleanup: popping nterm input (1.1: ) +input: + | + | +1 +./calc.at:1413: $PREPARSER ./calc input +stderr: + | error +input: | 1 + 2 * 3 + !* ++ stderr: +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1411: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -113985,19 +114522,13 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) Reading a token @@ -114021,66 +114552,71 @@ Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token ')' (1.13: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 Reading a token -Next token is token '=' (1.15: ) +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -114094,9 +114630,28 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stdout: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) stderr: -1.3: syntax error -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -114106,8 +114661,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1416: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.c calc.h + +syntax error +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1411: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -114144,14 +114712,14 @@ Entering state 31 Reading a token Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 107): +Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -114168,23 +114736,15 @@ Next token is token '*' (1.14: ) Shifting token '*' (1.14: ) Entering state 15 -Reducing stack 0 by rule 19 (line 131): +Reducing stack 0 by rule 19 (line 119): $1 = token '!' 
(1.13: ) $2 = token '*' (1.14: ) 1.14: memory exhausted Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1407: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -114195,6 +114755,26 @@ }eg ' expout || exit 77 stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) Starting parse Entering state 0 Reading a token @@ -114261,10 +114841,142 @@ 1.14: memory exhausted Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: cat stderr -./calc.at:1414: cat stderr -./calc.at:1407: "$PERL" -pi -e 'use strict; +stderr: +syntax error +stderr: +./calc.at:1433: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1407: cat stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -114274,7 +114986,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -114285,18 +114997,6 @@ }eg ' expout || exit 77 stderr: -input: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1408: cat stderr Starting parse Entering state 0 Reading a token @@ -114332,422 +115032,810 @@ -> $$ = nterm exp (1.9: 3) Entering state 31 Reading a token -Next token is token '+' (1.11: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 9 (line 101): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 30 -Next token is token '+' (1.11: ) +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 28 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (#) + (#) = 2222 -input: -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1416: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1407: cat stderr -stderr: -./calc.at:1413: cat stderr -Starting parse -Entering state 0 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 
79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Next token 
is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (5.11-6.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1432: cat stderr -input: -./calc.at:1431: cat stderr -Starting parse -Entering state 0 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering 
state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token 
"number" (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = 
token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (2.1: ) +Shifting token "end of input" (14.1: ) Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1416: $PREPARSER ./calc input -input: +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1411: cat stderr input: -stderr: - | (1 + # + 1) = 1111 -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -114757,102 +115845,102 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1407: $PREPARSER ./calc input -input: - | (* *) + (*) + (*) -input: - | (#) + (#) = 2222 -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1408: $PREPARSER ./calc input + | 1//2 +./calc.at:1433: $PREPARSER ./calc input +515. calc.at:1407: ok +./calc.at:1413: cat stderr stderr: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.3: syntax error +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1414: cat stderr +./calc.at:1413: $PREPARSER ./calc /dev/null stderr: -./calc.at:1411: cat stderr +input: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = 
token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -114860,465 +115948,876 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | error -./calc.at:1432: $PREPARSER ./calc input -stderr: -stderr: -input: -stderr: -Starting parse -Entering state 0 +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 21 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 30 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 22 Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 31 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 19 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 28 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 24 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 33 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 19 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) 
+Entering state 28 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 24 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 33 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 19 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 28 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' 
(1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 19 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 28 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) 
+Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 20 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 29 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 20 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 29 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 19 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 111): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 28 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 20 +Reading a token 
+Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 20 Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 29 Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 29 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 19 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 28 Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 24 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 33 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 24 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 33 Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 33 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 19 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = 
token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 28 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 24 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 33 Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 24 Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 33 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 112): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 19 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 28 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 18 +Reducing stack 0 by rule 2 (line 79): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" (14.1: ) Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +input: +input: + | 1 + 2 * 3 + !+ ++ +stderr: +./calc.at:1414: $PREPARSER ./calc input + | 1 2 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1416: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1431: cat stderr +stderr: +Starting parse +Entering state 0 Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): +Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 +Entering state 8 Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) +Cleanup: discarding lookahead token "number" (1.3: 2) + | (#) + (#) = 2222 +531. calc.at:1441: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} api.token.prefix={TOK_} %verbose ... +./calc.at:1441: mv calc.y.tmp calc.y + +./calc.at:1441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +Starting parse +Entering state 0 Reading a token Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' 
(1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +stderr: +Starting parse +Entering state 0 Reading a token Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) Starting parse Entering state 0 Reading a token @@ -115338,7 +116837,7 @@ Next token is token ')' (1.3: ) Shifting token ')' (1.3: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -115365,7 +116864,7 @@ Next token is token ')' (1.9: ) Shifting token ')' (1.9: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -115373,7 +116872,7 @@ Entering state 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -115416,262 +116915,28 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 21 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 30 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 21 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 21 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 21 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 22 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 31 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 22 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 19 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) + | 1 = 2 = 3 ./calc.at:1431: $PREPARSER ./calc input -stderr: -stderr: -input: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -1.1: syntax error -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -115681,96 +116946,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + stderr: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: $PREPARSER ./calc input -stderr: -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -syntax error: invalid character: '#' -Next token is token error (1.1: ) -Error: popping token '+' (1.1: ) -Error: popping nterm exp (1.1: 1) -Shifting token error (1.1: ) -Entering state 11 -Next token is token error (1.1: ) -Error: discarding token error (1.1: ) -Reading a token -Next token is token '+' (1.1: ) -Error: discarding token '+' (1.1: ) -Reading a token -Next token is token "number" (1.1: 1) -Error: discarding token "number" (1.1: 1) -Reading a token -Next token is token ')' (1.1: ) -Entering state 11 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 119): - $1 = token '(' (1.1: ) - $2 = token error (1.1: ) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.1: ) -Shifting token '=' (1.1: ) -Entering state 19 -Reading a token -Next token is token "number" (1.1: 1111) -Shifting token "number" (1.1: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 6 (line 82): - $1 = nterm exp (1.1: 1111) - $2 = token '=' (1.1: ) - $3 = nterm exp (1.1: 1111) --> $$ = nterm exp (1.1: 1111) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 1111) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -115790,7 +116967,7 @@ Next token is token ')' (1.3: ) Shifting token ')' (1.3: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2: ) $3 = token ')' (1.3: ) @@ -115817,7 +116994,7 @@ Next token is token ')' (1.9: ) Shifting token ')' (1.9: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.7: ) $2 = token error (1.8: ) $3 = token ')' (1.9: ) @@ -115825,7 +117002,7 @@ Entering state 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1-3: 1111) $2 = token '+' (1.5: ) $3 = nterm exp (1.7-9: 1111) @@ -115868,8 +117045,237 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +./calc.at:1413: cat stderr +./calc.at:1416: cat stderr +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1414: $PREPARSER ./calc input + | 1//2 +syntax error +./calc.at:1416: $PREPARSER ./calc input +stderr: +./calc.at:1433: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr +input: +532. calc.at:1443: testing Calculator C++ %header %locations parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+input: stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1443: mv calc.y.tmp calc.y + +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + | error +input: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 23 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1433: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -116117,344 +117523,9 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): 
- $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -stderr: -./calc.at:1409: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1407: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1441: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -116474,137 +117545,79 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr -./calc.at:1408: cat stderr -input: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: cat stderr | (1 + #) = 1111 -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1411: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1407: cat stderr stderr: -input: +stderr: +./calc.at:1411: $PREPARSER ./calc input +1.1: syntax error +stderr: +./calc.at:1443: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 
+Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: cat stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1414: $PREPARSER ./calc input -input: +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -116613,187 +117626,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1432: cat stderr - | (1 + #) = 1111 -input: -./calc.at:1408: $PREPARSER ./calc input -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1407: $PREPARSER ./calc input - | (!!) 
+ (1 2) = 1 -./calc.at:1416: $PREPARSER ./calc input -input: -stderr: -Starting parse -Entering state 0 +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' (1.12: ) +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.44: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -116806,7 +117866,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: Starting parse Entering state 0 Reading a token @@ -116840,7 +117899,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -116884,9 +117943,15 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1416: cat stderr +./calc.at:1431: cat stderr +stdout: +1.1: syntax error +stderr: +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: 
(.*)\)} { my $unexp = $1; @@ -116896,128 +117961,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1411: $PREPARSER ./calc input -./calc.at:1413: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) -Entering state 21 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 8 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) -Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -./calc.at:1407: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: cat stderr Starting parse Entering state 0 Reading a token @@ -117095,202 +118038,177 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1434: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: + | error + | + | +1 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1414: cat stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1413: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 16 -Reducing stack 0 by rule 16 (line 116): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 21 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1434: $PREPARSER ./calc input +syntax error +stderr: +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1411: cat stderr +input: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: cat stderr + | 1 + 2 * 3 + !* ++ +./calc.at:1414: $PREPARSER ./calc input stderr: input: -./calc.at:1431: cat stderr - | 1 = 2 = 3 -./calc.at:1432: $PREPARSER ./calc input +stderr: +stderr: +syntax error + | (!!) + (1 2) = 1 +./calc.at:1413: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = 
token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +stderr: +./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (# + 1) = 1111 +./calc.at:1411: $PREPARSER ./calc input +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | 1 = 2 = 3 Starting parse Entering state 0 Reading a token @@ -117402,12 +118320,21 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ +./calc.at:1443: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: stderr: stderr: -./calc.at:1413: $PREPARSER ./calc input -input: Starting parse Entering state 0 Reading a token @@ -117415,26 +118342,18 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Error: discarding token '+' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token Next token is token ')' (1.7: ) Entering state 11 @@ -117485,130 +118404,89 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.7: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -stderr: -./calc.at:1409: $PREPARSER ./calc input +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 81): +Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.1: 1) -> $$ = nterm exp (1.1: 1) -Entering state 12 +Entering state 8 Reading a token -Next token is token '+' (1.1: ) -Shifting token '+' (1.1: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 7 (line 98): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 2) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 2) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 2) -Entering state 8 -Reading a token 
-Next token is token '/' (1.1: ) -Shifting token '/' (1.1: ) -Entering state 23 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.1: ) -Shifting token '-' (1.1: ) -Entering state 20 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 81): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 29 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token ')' (1.1: ) -Reducing stack 0 by rule 8 (line 99): +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.1: 1) - $2 = token '-' (1.1: ) - $3 = nterm exp (1.1: 1) --> $$ = nterm exp (1.1: 0) -Entering state 12 -Next token is token ')' (1.1: ) -Shifting token ')' (1.1: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 118): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.1: 0) - $3 = token ')' (1.1: ) --> $$ = nterm exp (1.1: 0) -Entering state 32 -Reading a token -Next token is token '\n' (1.1: ) -Reducing stack 0 by rule 10 (line 101): - $1 = nterm exp (1.1: 2) - $2 = token '/' (1.1: ) - $3 = nterm exp (1.1: 0) -error: null divisor --> $$ = nterm exp (1.1: 2) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.1: ) -Shifting token '\n' (1.1: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 77): - $1 = nterm exp (1.1: 2) - $2 = token '\n' (1.1: ) --> $$ = nterm line (1.1: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 71): - $1 = nterm line (1.1: ) --> $$ = nterm input (1.1: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (1.1: ) -Entering state 17 -Cleanup: popping token "end of input" (1.1: ) -Cleanup: popping nterm input (1.1: ) -stderr: -./calc.at:1431: $PREPARSER ./calc /dev/null +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' 
(1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: + | 1 2 Starting parse Entering state 0 Reading a token @@ -117720,71 +118598,14 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1416: cat stderr +./calc.at:1434: $PREPARSER ./calc input +stderr: +1.7: syntax error +stderr: +stderr: +1.3: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -117810,7 +118631,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) @@ -117854,11 +118675,9 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -stderr: -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1431: cat stderr +input: +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117868,82 +118687,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.7: syntax error -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: cat stderr -stderr: -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117953,9 +118697,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -./calc.at:1414: "$PERL" -pi -e 'use strict; + | 1 = 2 = 3 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117965,7 +118709,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1407: "$PERL" -pi -e 'use strict; +./calc.at:1431: $PREPARSER ./calc /dev/null +stderr: +stderr: +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -117975,6 +118722,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.3: syntax error +./calc.at:1414: cat stderr +stderr: +syntax error Starting parse Entering state 0 Reading a token @@ -117986,9 +118737,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -117996,79 +118747,23 @@ Reducing stack 0 by rule 5 (line 88): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Entering state 28 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: cat stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: cat stderr ./calc.at:1411: cat stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: input: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: cat stderr - | (# + 1) = 1111 -./calc.at:1408: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1416: "$PERL" -pi -e 'use strict; +stderr: + | (#) + (#) = 2222 +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -118078,11 +118773,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1409: cat stderr +./calc.at:1414: $PREPARSER ./calc input +syntax error stderr: -./calc.at:1407: cat stderr +input: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 19 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 28 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) stderr: input: + | (1 + # + 1) = 1111 Starting parse Entering state 0 Reading a token @@ -118097,49 +118824,77 @@ Next token is token error (1.2: ) Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 
30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -118152,96 +118907,16 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1416: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1409: $PREPARSER ./calc input - | (# + 1) = 1111 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | + | +1 ./calc.at:1411: $PREPARSER ./calc input -515. calc.at:1407: ok + | (- *) + (1 2) = 1 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1413: $PREPARSER ./calc input stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 stderr: -./calc.at:1414: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -118281,7 +118956,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -118325,147 +119000,20 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1432: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1434: cat stderr ./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -118583,14 +119131,116 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 19 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: input: - | - | +1 -./calc.at:1432: $PREPARSER ./calc input - | (- *) + (1 2) = 1 Starting parse Entering state 0 Reading a token @@ -118630,7 +119280,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -118674,148 +119324,13 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1431: cat stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: cat stderr + | 1//2 +./calc.at:1434: $PREPARSER ./calc input +2.1: syntax error stderr: stderr: -2.1: syntax error -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -118933,7 +119448,8 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: "$PERL" -pi -e 'use strict; +stdout: +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -118943,123 +119459,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: "$PERL" -pi -e 'use strict; +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119069,8 +119469,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -./calc.at:1409: "$PERL" -pi -e 'use strict; +1.3: syntax error +./calc.at:1411: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119080,9 +119480,64 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1431: cat stderr +./calc.at:1432: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + + | + | +1 +./calc.at:1416: $PREPARSER ./calc input +./calc.at:1414: cat stderr +./calc.at:1433: cat stderr +stderr: stderr: +./calc.at:1411: cat stderr +1.3: syntax error input: -./calc.at:1411: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1432: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119092,15 +119547,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error +./calc.at:1433: $PREPARSER ./calc /dev/null +input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: +stderr: ./calc.at:1431: $PREPARSER ./calc input -./calc.at:1411: cat stderr -./calc.at:1409: cat stderr -./calc.at:1408: cat stderr -./calc.at:1432: "$PERL" -pi -e 'use strict; +input: +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -119110,165 +119564,30 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.1: syntax error +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (1 + #) = 1111 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1413: cat stderr -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 115): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 19 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 28 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: +./calc.at:1414: $PREPARSER ./calc input +1.1: syntax error syntax error syntax error syntax error syntax error error: 4444 != 1 + | (1 + 1) / (1 - 1) ./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -./calc.at:1414: cat stderr - | (1 + # + 1) = 1111 ./calc.at:1411: $PREPARSER ./calc input stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -stdout: -input: -input: +stderr: +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -119297,49 +119616,43 @@ Next token is token error (1.6: ) Error: discarding token error (1.6: ) Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -119352,31 +119665,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1433: 
"$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -input: -./calc.at:1409: $PREPARSER ./calc input -./calc.at:1416: cat stderr -input: - | (1 + # + 1) = 1111 -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1413: $PREPARSER ./calc input - | (* *) + (*) + (*) -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1408: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -119396,58 +119684,90 @@ Shifting token '+' (1.4: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 30 Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 23 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 20 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 100): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 27 +Reducing stack 0 by rule 13 (line 
113): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 32 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 102): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -119460,81 +119780,15 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -stderr: -stderr: +./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 126): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 83): + $1 = token '\n' (1.1-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -119542,11 +119796,33 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1434: cat stderr +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (* *) + (*) + (*) +./calc.at:1413: $PREPARSER ./calc input +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -119575,7 +119851,7 @@ Entering state 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -119584,7 +119860,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -119620,7 +119896,7 @@ Entering state 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -119629,7 +119905,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -119637,7 +119913,7 @@ Entering state 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): +Reducing stack 0 by rule 10 (line 102): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -119662,8 +119938,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -119779,10 +120053,523 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -531. calc.at:1441: testing Calculator C++ %locations parse.error=verbose %debug api.prefix={calc} api.token.prefix={TOK_} %verbose ... -./calc.at:1441: mv calc.y.tmp calc.y +stderr: +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | 1 2 +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1433: cat stderr + | error +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1416: cat stderr +./calc.at:1411: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.3: syntax error +stderr: +input: +1.1: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: $PREPARSER ./calc /dev/null + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1414: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 30 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1411: cat stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +./calc.at:1431: cat stderr +stderr: +1.1: syntax error +stderr: +1.3: syntax error +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +518. calc.at:1411: ok + | (# + 1) = 1111 +./calc.at:1414: $PREPARSER ./calc input +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +stderr: +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1413: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (!!) + (1 2) = 1 +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +error: 2222 != 1 +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: -./calc.at:1441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1434: cat stderr +syntax error +error: 2222 != 1 +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 + !+ ++ +./calc.at:1432: cat stderr +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1416: cat stderr +stderr: +input: +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1434: $PREPARSER ./calc input +input: +input: Starting parse Entering state 0 Reading a token @@ -119840,16 +120627,26 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -input: +stderr: +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1432: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +1.7: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: $PREPARSER ./calc input +stderr: +./calc.at:1414: cat stderr +./calc.at:1433: cat stderr +1.3: syntax error ./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -119860,26 +120657,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1432: $PREPARSER ./calc /dev/null - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1433: $PREPARSER ./calc input +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: - | (* *) + (*) + (*) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -119887,102 +120667,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 30 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) 
+Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 21 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 30 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 21 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -119995,8 +120907,83 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1411: "$PERL" -pi -e 'use strict; +1.7: syntax error +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: +input: +1.3: syntax error +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120006,7 +120993,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +./calc.at:1414: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1431: cat stderr stderr: Starting parse Entering state 0 @@ -120047,7 +121037,7 @@ Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) Entering state 26 -Reducing stack 0 by rule 14 (line 126): +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) $2 = token error (1.2-10: ) $3 = token ')' (1.11: ) @@ -120092,125 +121082,7 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -stderr: -stderr: -stderr: -./calc.at:1431: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 21 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 30 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -120268,17 +121140,27 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1411: cat stderr -1.1: syntax error +stderr: +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -120286,102 +121168,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 Reading a token -Next token is token ')' (1.5: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 21 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 21 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 30 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 21 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 30 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) Entering state 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token ')' (1.11: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 30 Reading a token -Next token is token '+' (1.13: ) +Next token is token '+' (1.30: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 21 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 22 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 31 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 22 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) Reading a token -Next token is token ')' (1.17: ) +Next token is token ')' (1.42: ) Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 30 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '=' (1.44: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 19 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -120394,7 +121408,111 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 21 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token 
')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | (- *) + (1 2) = 1 +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +533. calc.at:1445: testing Calculator C++ parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1445: mv calc.y.tmp calc.y + +./calc.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1432: cat stderr +syntax error +syntax error +error: 2222 != 1 +./calc.at:1434: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120404,9 +121522,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1409: "$PERL" -pi -e 'use strict; +./calc.at:1414: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120416,7 +121532,77 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1413: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1416: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token 
"number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +input: +./calc.at:1414: cat stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120426,16 +121612,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -1.1: syntax error -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1408: cat stderr -./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1413: cat stderr +syntax error +syntax error +error: 2222 != 1 + | error input: -./calc.at:1414: "$PERL" -pi -e 'use strict; + | + | +1 +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120445,7 +121629,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1432: $PREPARSER ./calc input +input: +./calc.at:1434: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1416: $PREPARSER ./calc input +stderr: input: +1.1: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +stderr: +./calc.at:1414: $PREPARSER ./calc input +stderr: +./calc.at:1433: cat stderr +stderr: Starting parse Entering state 0 Reading a token @@ -120453,102 +121651,98 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token ')' (1.5: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 21 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 30 Reading a token -Next token is token '+' (1.13: ) +Next token is token '=' (1.14: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 21 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 30 +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -120561,27 +121755,13 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + 1) / (1 - 1) -./calc.at:1408: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1431: $PREPARSER ./calc input -input: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1409: cat stderr -input: -stderr: -./calc.at:1414: cat stderr - | (1 + 1) / (1 - 1) +./calc.at:1413: cat stderr +1.1: syntax error stderr: +2.1: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Reading a token @@ -120610,7 +121790,7 @@ Entering state 30 Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): +Reducing stack 0 by rule 7 (line 99): $1 = nterm exp (1.2: 1) $2 = token '+' (1.4: ) $3 = nterm exp (1.6: 1) @@ -120619,7 +121799,7 @@ Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.1: ) $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) @@ -120655,7 +121835,7 @@ Entering state 29 Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): +Reducing stack 0 by rule 8 (line 100): $1 = nterm exp (1.12: 1) $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) @@ -120664,7 +121844,7 @@ Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Reducing stack 0 by rule 13 (line 113): $1 = token '(' (1.11: ) $2 = nterm exp (1.12-16: 0) $3 = token ')' (1.17: ) @@ -120672,7 +121852,7 @@ Entering state 32 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): +Reducing stack 0 by rule 10 (line 102): $1 = nterm exp (1.1-7: 2) $2 = token '/' (1.9: ) $3 = nterm exp (1.11-17: 0) @@ -120697,14 +121877,23 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1408: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -error: 2222 != 1 -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1416: "$PERL" -pi -e 'use strict; +stderr: + | (- *) + (1 2) = 1 +stderr: +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -120714,8 +121903,199 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +2.1: syntax error +input: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 16 +Reducing stack 0 by rule 16 (line 116): + $1 = token '!' 
(1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 21 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | 1 + 2 * 3 + !* ++ +./calc.at:1413: $PREPARSER ./calc input stderr: stderr: +./calc.at:1431: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 21 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +stderr: +./calc.at:1445: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS Starting parse Entering state 0 Reading a token @@ -120831,113 +122211,266 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1411: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 2 +./calc.at:1432: cat stderr + | (* *) + (*) + (*) +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1414: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 105): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +./calc.at:1414: cat stderr +./calc.at:1416: cat stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +syntax error +syntax error +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: cat stderr +520. 
calc.at:1414: ok +input: +stderr: +./calc.at:1433: cat stderr + | 1 = 2 = 3 +./calc.at:1413: cat stderr +./calc.at:1432: $PREPARSER ./calc input +syntax error +syntax error +syntax error +./calc.at:1434: $PREPARSER ./calc /dev/null +input: + | (- *) + (1 2) = 1 +input: +stderr: +input: +./calc.at:1416: $PREPARSER ./calc input +stderr: + | (* *) + (*) + (*) +./calc.at:1433: $PREPARSER ./calc input +1.7: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (#) + (#) = 2222 +./calc.at:1413: $PREPARSER ./calc input +stderr: +1.1: syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> 
$$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 106): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 125): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 108): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -120951,11 +122484,12 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1433: $PREPARSER ./calc input -syntax error -error: 2222 != 1 -./calc.at:1416: cat stderr -./calc.at:1432: cat stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error + +stderr: Starting parse Entering state 0 Reading a token @@ -121053,8 +122587,39 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -517. calc.at:1409: ok +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +1.1: syntax error +1.7: syntax error +stderr: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -121062,101 +122627,102 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 115): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 21 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' 
(1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 19 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 28 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 2222) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -121170,28 +122736,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -1.3: syntax error -./calc.at:1408: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1441: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS - | 1 + 2 * 3 + !+ ++ -./calc.at:1414: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1416: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -121289,146 +122833,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1432: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1408: cat stderr -stderr: -1.3: syntax error -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: "$PERL" -pi -e 'use strict; +./calc.at:1433: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121438,16 +122843,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -516. 
calc.at:1408: ok -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1413: "$PERL" -pi -e 'use strict; +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121457,7 +122853,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121467,74 +122863,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1411: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1433: "$PERL" -pi -e 'use strict; +./calc.at:1432: cat stderr +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -121544,237 +122874,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: - - | 1 + 2 * 3 + !- ++ +./calc.at:1431: cat stderr +./calc.at:1416: cat stderr +./calc.at:1433: cat stderr ./calc.at:1413: cat stderr -./calc.at:1414: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -518. 
calc.at:1411: ok -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 117): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1434: cat stderr +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1431: $PREPARSER ./calc input +534. calc.at:1446: testing Calculator C++ %header %locations parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+./calc.at:1446: mv calc.y.tmp calc.y -./calc.at:1431: cat stderr input: -stderr: -./calc.at:1433: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) input: -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 | (1 + #) = 1111 ./calc.at:1413: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ +input: +input: + | + | +1 +./calc.at:1432: $PREPARSER ./calc input stderr: +input: + | (* *) + (*) + (*) ./calc.at:1416: $PREPARSER ./calc input +stderr: +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | 1 + 2 * 3 + !+ ++ +./calc.at:1434: $PREPARSER ./calc input +stderr: +./calc.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y Starting parse Entering state 0 Reading a token @@ -121852,169 +122982,141 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1433: $PREPARSER ./calc input stderr: -input: -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | (- *) + (1 2) = 1 -./calc.at:1431: $PREPARSER ./calc input - -./calc.at:1414: cat stderr - | 1//2 -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error -syntax error -error: 2222 != 1 -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: - | 1 + 2 * 3 + !* ++ -./calc.at:1432: cat stderr -stderr: -./calc.at:1414: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 118): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -122092,186 +123194,132 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.3: syntax error +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 30 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 21 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -error: 2222 != 1 -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -533. calc.at:1445: testing Calculator C++ parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1413: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1445: mv calc.y.tmp calc.y - -stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 21 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 30 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 22 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 31 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 101): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 30 -Next token is token '+' (1.11: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 21 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 25 +Reducing stack 0 by rule 4 (line 84): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 78): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): - $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -532. calc.at:1443: testing Calculator C++ %header %locations parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - | (!!) + (1 2) = 1 -1.3: syntax error -./calc.at:1416: cat stderr -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1443: mv calc.y.tmp calc.y - -./calc.at:1413: cat stderr +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 17 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -534. calc.at:1446: testing Calculator C++ %header %locations parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1431: "$PERL" -pi -e 'use strict; +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122281,8 +123329,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122292,10 +123339,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1446: mv calc.y.tmp calc.y - -./calc.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1433: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !- ++ +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122306,20 +123352,12 @@ }eg ' expout || exit 77 input: -./calc.at:1443: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - | (# + 1) = 1111 -input: -stderr: -./calc.at:1414: cat stderr -./calc.at:1413: $PREPARSER ./calc input -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1431: cat stderr - | 1 + 2 * 3 + !* ++ -./calc.at:1416: $PREPARSER ./calc input stderr: -input: -./calc.at:1432: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !- ++ +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1432: cat stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122329,7 +123367,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1413: cat stderr +stderr: +./calc.at:1416: cat stderr +stderr: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: $PREPARSER ./calc /dev/null stderr: +input: +stderr: + | (# + 1) = 1111 +./calc.at:1413: $PREPARSER ./calc input +./calc.at:1434: cat stderr +1.1: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1416: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -122399,10 +123454,11 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: input: - | (* *) + (*) + (*) - | (#) + (#) = 2222 -./calc.at:1433: cat stderr + | (!!) + (1 2) = 1 +./calc.at:1434: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -122460,123 +123516,46 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): $1 = token '!' 
(1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS stderr: +1.1: syntax error +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -syntax error -syntax error -syntax error +./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) 
-Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +1.11: syntax error +1.1-16: error: 2222 != 1 stderr: Starting parse Entering state 0 @@ -122647,10 +123626,6 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | error -./calc.at:1433: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token @@ -122708,24 +123683,25 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '*' (1.14: ) -Shifting token '*' (1.14: ) -Entering state 15 -Reducing stack 0 by rule 19 (line 119): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 117): $1 = token '!' (1.13: ) - $2 = token '*' (1.14: ) -1.14: memory exhausted + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1432: cat stderr -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -syntax error -syntax error -syntax error -1.1: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: cat stderr ./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -122736,12 +123712,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS input: - | (- *) + (1 2) = 1 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1433: cat stderr +./calc.at:1413: cat stderr +./calc.at:1432: cat stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -122751,143 +123726,86 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 + 2 * 3 + !- ++ +./calc.at:1416: $PREPARSER ./calc input +input: +input: + | (1 + # + 1) = 1111 +./calc.at:1413: $PREPARSER ./calc input +stderr: +input: +input: + | (#) + (#) = 2222 +./calc.at:1431: $PREPARSER ./calc input + | (#) + (#) = 2222 stderr: +./calc.at:1433: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.9: 3) +Shifting token "number" 
(1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -stderr: -./calc.at:1413: cat stderr -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1416: cat stderr -1.1: syntax error -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -input: - | (1 + # + 1) = 1111 -./calc.at:1413: $PREPARSER ./calc input -./calc.at:1431: cat stderr +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: Starting parse Entering state 0 Reading a token @@ -122971,133 +123889,96 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1416: $PREPARSER ./calc input -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1432: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: cat stderr ./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1414: cat stderr -input: +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 30 Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 
-Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1431: $PREPARSER ./calc input -stderr: +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 118): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Reading a token @@ -123181,113 +124062,28 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: cat stderr -input: -./calc.at:1433: cat stderr - | (1 + #) = 1111 -./calc.at:1414: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 21 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 19 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 28 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: +input: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 + | (- *) + (1 2) = 1 +./calc.at:1434: $PREPARSER ./calc input stderr: +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -123298,92 +124094,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1431: $EGREP -c -v 'Return for a new token:|LAC:' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -input: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 - | (* *) + (*) + (*) -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1413: cat stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -123394,110 +124108,55 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stderr: -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -1.7: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1431: $PREPARSER ./calc input -stderr: +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' 
(1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1413: cat stderr ./calc.at:1416: cat stderr -stderr: -stderr: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: cat stderr +./calc.at:1432: cat stderr +input: | (1 + 1) / (1 - 1) +./calc.at:1431: cat stderr +input: ./calc.at:1413: $PREPARSER ./calc input -1.2: syntax error -1.10: syntax error -1.16: syntax error -1.7: syntax error -stderr: +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +input: +./calc.at:1433: $PREPARSER ./calc input input: stderr: + | 1 + 2 * 3 + !* ++ Starting parse Entering state 0 Reading a token @@ -123613,118 +124272,91 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + #) = 1111 ./calc.at:1416: $PREPARSER ./calc input -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | (!!) + (1 2) = 1 +./calc.at:1432: $PREPARSER ./calc input +stderr: ./calc.at:1413: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: +input: +1.6: syntax error: invalid character: '#' stderr: +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | (1 + #) = 1111 +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1431: $PREPARSER ./calc input +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -123840,89 +124472,81 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: cat stderr -./calc.at:1433: cat stderr -./calc.at:1414: cat stderr stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1434: cat stderr +syntax error: invalid character: '#' +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 30 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 22 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.9: 3) +Shifting token 
"number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 28 + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 31 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 101): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 30 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1431: "$PERL" -pi -e 'use strict; +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '*' (1.14: ) +Shifting token '*' (1.14: ) +Entering state 15 +Reducing stack 0 by rule 19 (line 119): + $1 = token '!' (1.13: ) + $2 = token '*' (1.14: ) +1.14: memory exhausted +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1413: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123933,7 +124557,10 @@ }eg ' expout || exit 77 input: -./calc.at:1413: "$PERL" -pi -e 'use strict; + | (* *) + (*) + (*) +stderr: +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123943,21 +124570,62 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1432: $PREPARSER ./calc input +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +stderr: ./calc.at:1413: cat stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +519. 
calc.at:1413: ok +./calc.at:1433: cat stderr +./calc.at:1416: cat stderr +./calc.at:1432: cat stderr stderr: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | (# + 1) = 1111 -./calc.at:1414: $PREPARSER ./calc input input: - | - | +1 -stderr: +input: + | (- *) + (1 2) = 1 +./calc.at:1432: $PREPARSER ./calc input + | (#) + (#) = 2222 + | (# + 1) = 1111 +./calc.at:1416: $PREPARSER ./calc input ./calc.at:1433: $PREPARSER ./calc input -./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; + +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -123967,10 +124635,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1431: cat stderr stderr: -519. calc.at:1413: ok stderr: +./calc.at:1431: cat stderr +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 Starting parse Entering state 0 Reading a token @@ -123985,49 +124656,77 @@ Next token is token error (1.2: ) Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 
1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -124040,12 +124739,21 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' ./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1416: cat stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1434: cat stderr +stderr: +input: stderr: + | (# + 1) = 1111 +./calc.at:1431: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -124060,49 +124768,77 @@ Next token is token error (1.2: ) Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 21 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 99): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) 
+Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 19 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 28 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -124115,16 +124851,11 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -2.1: syntax error -input: input: - | 1 + 2 * 3 + !- ++ -./calc.at:1432: $PREPARSER ./calc input - | (#) + (#) = 2222 + | 1 + 2 * 3 + !+ ++ +./calc.at:1434: $PREPARSER ./calc input stderr: -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1431: $PREPARSER ./calc input -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124134,7 +124865,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1433: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +stderr: +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124144,21 +124879,86 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1414: cat stderr stderr: +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr +535. calc.at:1448: testing Calculator C++ %header %locations api.location.file=none ... 
+./calc.at:1448: mv calc.y.tmp calc.y + input: stderr: - +./calc.at:1448: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | 1 + 2 * 3 + !- ++ +./calc.at:1434: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1416: cat stderr +./calc.at:1432: cat stderr +stderr: +stderr: +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (# + 1) = 1111 +stdout: +input: +./calc.at:1437: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1433: cat stderr + | (* *) + (*) + (*) +./calc.at:1432: $PREPARSER ./calc input +stderr: +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | (1 + #) = 1111 ./calc.at:1416: $PREPARSER ./calc input - | (1 + # + 1) = 1111 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1414: $PREPARSER ./calc input -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.2: syntax error +1.10: syntax error +1.16: syntax error + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: $PREPARSER ./calc input stderr: +input: stderr: + | (1 + # + 1) = 1111 Starting parse Entering state 0 Reading a token @@ -124187,81 +124987,6 @@ Next token is token error (1.6: ) Error: discarding token error (1.6: ) Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token Next token is token ')' (1.7: ) Entering state 11 Next token is token ')' (1.7: ) @@ -124312,7 +125037,10 @@ Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1431: cat stderr +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124322,28 +125050,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: cat stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -stdout: -./calc.at:1426: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - -./calc.at:1433: $PREPARSER ./calc /dev/null -stderr: -stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: Starting parse Entering state 0 Reading a token @@ -124372,49 +125087,43 @@ Next token is token error (1.6: ) Error: discarding token error (1.6: ) Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 19 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 28 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -124427,7 +125136,68 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | (1 + # + 1) = 1111 +stderr: +./calc.at:1431: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.6: syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1448: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1434: cat stderr +./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: cat stderr +stderr: +input: +syntax error: invalid character: '#' +input: + | (#) + (#) = 2222 +./calc.at:1434: $PREPARSER ./calc input +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | 1 2 +./calc.at:1437: $PREPARSER ./calc input +input: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +1.3: syntax error, unexpected number +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 ./calc.at:1432: cat stderr +./calc.at:1416: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -124497,75 +125267,12 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1431: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1414: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1426: $PREPARSER ./calc input -input: -535. calc.at:1448: testing Calculator C++ %header %locations api.location.file=none ... -./calc.at:1448: mv calc.y.tmp calc.y - - | (#) + (#) = 2222 -stderr: -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1414: cat stderr -./calc.at:1416: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.1: syntax error -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1431: cat stderr -./calc.at:1416: cat stderr stderr: +./calc.at:1433: cat stderr 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1433: "$PERL" -pi -e 'use strict; +./calc.at:1431: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124575,16 +125282,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error, unexpected number +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1432: $PREPARSER ./calc input stderr: input: | (1 + 1) / (1 - 1) -./calc.at:1414: $PREPARSER ./calc input -input: - | (1 + # + 1) = 1111 -./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1416: $PREPARSER ./calc input +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; +./calc.at:1433: $PREPARSER ./calc input +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -124594,6 +125303,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -124601,102 +125312,56 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 +Error: discarding token '+' (1.4: ) Reading a token Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 +Error: discarding token "number" (1.6: 1) Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +Entering state 11 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): +Entering state 26 +Reducing stack 0 by rule 14 (line 114): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 
-Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 19 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -124709,9 +125374,76 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1431: cat stderr +1.11-17: error: null divisor +./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1437: cat stderr stderr: -./calc.at:1414: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11-17: error: null divisor +input: +./calc.at:1434: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1431: $PREPARSER ./calc input +input: +stderr: +error: null divisor +./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1416: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1433: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +input: +input: +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor + | 1//2 +./calc.at:1437: $PREPARSER ./calc input +stderr: +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1433: cat stderr + | (1 + #) = 1111 input: +./calc.at:1434: $PREPARSER ./calc input + | (1 + # + 1) = 1111 +stderr: +./calc.at:1416: $PREPARSER ./calc input +stderr: +stderr: +1.6: syntax error: invalid character: '#' +525. calc.at:1433: ok Starting parse Entering state 0 Reading a token @@ -124750,157 +125482,39 @@ Entering state 11 Next token is token ')' (1.11: ) Shifting token ')' (1.11: ) -Entering state 26 -Reducing stack 0 by rule 14 (line 114): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 19 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 28 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 89): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 25 -Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 78): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 17 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: - | (1 + #) = 1111 -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 21 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 30 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 99): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 23 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 20 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 88): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 100): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 27 -Reducing stack 0 by rule 13 (line 113): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 32 +Entering state 26 +Reducing stack 0 by rule 14 (line 114): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 102): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 19 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 88): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 28 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 89): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = 
nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 25 Reducing stack 0 by rule 4 (line 84): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 78): @@ -124913,17 +125527,33 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1431: $PREPARSER ./calc input -input: -stderr: - | 1 2 -syntax error: invalid character: '#' -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: $PREPARSER ./calc input +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1433: cat stderr stderr: -./calc.at:1432: cat stderr +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1431: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + stderr: Starting parse Entering state 0 @@ -125008,10 +125638,7 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -syntax error: invalid character: '#' -syntax error -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1414: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125021,15 +125648,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1433: $PREPARSER ./calc input -stderr: -syntax error - | (1 + #) = 1111 -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1416: "$PERL" -pi -e 'use strict; +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125039,20 +125658,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1414: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1416: cat stderr -stderr: -stdout: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1431: cat stderr +./calc.at:1432: cat stderr +./calc.at:1437: cat stderr +./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125062,33 +125671,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1433: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr +523. calc.at:1431: ok +input: +input: + | error +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1434: cat stderr +536. calc.at:1449: testing Calculator C++ %header %locations api.location.file="my-location.hh" ... +./calc.at:1449: mv calc.y.tmp calc.y + +./calc.at:1416: cat stderr +stderr: + | (#) + (#) = 2222 +./calc.at:1432: $PREPARSER ./calc input stderr: +./calc.at:1449: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y input: -1.6: syntax error: invalid character: '#' -./calc.at:1434: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 stderr: -520. calc.at:1414: ok -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 +./calc.at:1434: $PREPARSER ./calc input +1.1: syntax error, unexpected invalid token +input: | (1 + 1) / (1 - 1) ./calc.at:1416: $PREPARSER ./calc input -./calc.at:1431: cat stderr -./calc.at:1448: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error, unexpected invalid token Starting parse Entering state 0 Reading a token @@ -125204,6 +125822,9 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' ./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -125214,52 +125835,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1416: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | (# + 1) = 1111 -./calc.at:1431: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' stderr: -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1432: cat stderr Starting parse Entering state 0 Reading a token @@ -125375,12 +125951,28 @@ Entering state 17 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - -stderr: -./calc.at:1426: cat stderr -./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: -stderr: +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1434: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1432: cat stderr +./calc.at:1437: cat stderr ./calc.at:1416: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -125391,27 +125983,42 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -syntax error: invalid character: '#' -./calc.at:1432: $PREPARSER ./calc input -./calc.at:1433: cat stderr -stderr: +./calc.at:1434: cat stderr +537. calc.at:1451: testing Calculator C++ %no-lines %header %locations api.location.file="my-location.hh" ... +./calc.at:1451: mv calc.y.tmp calc.y + +./calc.at:1416: cat stderr input: -1.2: syntax error: invalid character: '#' -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | 1 = 2 = 3 +./calc.at:1451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1437: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1432: $PREPARSER ./calc input +./calc.at:1449: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS input: stderr: stderr: - | (!!) + (1 2) = 1 - | 1 2 -1.2: syntax error: invalid character: '#' -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1416: cat stderr + | (1 + # + 1) = 1111 ./calc.at:1434: $PREPARSER ./calc input - | 1//2 +521. 
calc.at:1416: ok +1.7: syntax error, unexpected '=' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.7: syntax error, unexpected '=' +stderr: +1.6: syntax error: invalid character: '#' +stderr: stdout: -./calc.at:1431: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' + +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125421,8 +126028,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1437: "$PERL" -ne ' +./calc.at:1437: cat stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -125431,23 +126048,9 @@ || /\s$/ # No tabs. || /\t/ - )' calc.cc + )' calc.cc calc.hh -stderr: -stderr: -1.3: syntax error -stderr: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -521. calc.at:1416: ok -input: -./calc.at:1431: cat stderr -stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125457,8 +126060,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.3: syntax error +input: +input: + | + | +1 +./calc.at:1437: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -125472,95 +126078,34 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -syntax error -./calc.at:1437: $PREPARSER ./calc input +./calc.at:1426: $PREPARSER ./calc input stderr: stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -input: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1451: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1434: cat stderr +stderr: +./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr +2.1: syntax error, unexpected '+' ./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -536. calc.at:1449: testing Calculator C++ %header %locations api.location.file="my-location.hh" ... 
-./calc.at:1449: mv calc.y.tmp calc.y - -./calc.at:1431: $PREPARSER ./calc input - -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1432: cat stderr stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1449: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1434: cat stderr -stderr: +2.1: syntax error, unexpected '+' input: - | (1 + # + 1) = 1111 -./calc.at:1432: $PREPARSER ./calc input -syntax error: invalid character: '#' input: | 1 2 -stderr: -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1433: cat stderr +./calc.at:1426: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) input: - | 1//2 -./calc.at:1426: cat stderr -1.6: syntax error: invalid character: '#' ./calc.at:1434: $PREPARSER ./calc input -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -input: -1.3: syntax error + | (# + 1) = 1111 +./calc.at:1432: $PREPARSER ./calc input stderr: -1.3: syntax error, unexpected number -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | error -./calc.at:1426: $PREPARSER ./calc input stderr: stderr: -input: syntax error -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error, unexpected number - | (- *) + (1 2) = 1 -./calc.at:1433: $PREPARSER ./calc input -stderr: -./calc.at:1431: "$PERL" -pi -e 'use strict; +1.11-17: error: null divisor +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125570,9 +126115,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error +./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1432: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1437: cat stderr +syntax error +1.11-17: error: null divisor +stderr: +1.2: syntax error: invalid character: '#' +538. calc.at:1453: testing Calculator C++ %locations parse.lac=full parse.error=verbose ... +./calc.at:1453: mv calc.y.tmp calc.y + +./calc.at:1437: $PREPARSER ./calc /dev/null +./calc.at:1434: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125582,12 +126140,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -537. 
calc.at:1451: testing Calculator C++ %no-lines %header %locations api.location.file="my-location.hh" ... -./calc.at:1451: mv calc.y.tmp calc.y - +./calc.at:1453: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: -syntax error -./calc.at:1437: "$PERL" -pi -e 'use strict; +1.1: syntax error, unexpected end of input +./calc.at:1432: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125597,16 +126153,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1437: cat stderr -./calc.at:1451: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1431: cat stderr -./calc.at:1432: cat stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125616,13 +126164,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1434: cat stderr stderr: -input: -input: -stderr: - | 1//2 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1426: "$PERL" -pi -e 'use strict; +1.1: syntax error, unexpected end of input +./calc.at:1426: cat stderr +526. calc.at:1434: ok +./calc.at:1432: cat stderr +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -125632,13 +126180,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 - | (1 + 1) / (1 - 1) -./calc.at:1431: $PREPARSER ./calc input -stdout: +input: + | 1//2 +./calc.at:1426: $PREPARSER ./calc input +input: +stderr: +./calc.at:1437: cat stderr +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + | (1 + # + 1) = 1111 +./calc.at:1432: $PREPARSER ./calc input +stderr: stderr: +stdout: +1.6: syntax error: invalid character: '#' ./calc.at:1438: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -125650,25 +126206,13 @@ || /\t/ )' calc.cc -input: +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (1 + 1) / (1 - 1) -./calc.at:1432: $PREPARSER ./calc input -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +syntax error input: -stderr: -error: null divisor +./calc.at:1453: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -125682,27 +126226,28 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1431: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1426: cat stderr -stderr: -1.11-17: error: null divisor -./calc.at:1434: cat stderr -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: null divisor -input: stderr: -input: - | 1 = 2 = 3 -./calc.at:1426: $PREPARSER ./calc input +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' stderr: - | error -1.11-17: error: null divisor -./calc.at:1433: cat stderr +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1434: $PREPARSER ./calc input +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -126721,7 +127266,7 @@ Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -126731,11 +127276,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 stderr: Starting parse Entering state 0 @@ -127754,21 +128300,11 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1432: cat stderr ./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr -syntax error -./calc.at:1451: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1432: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1431: "$PERL" -pi -e 'use strict; +./calc.at:1426: cat stderr +539. calc.at:1454: testing Calculator C++ %locations parse.lac=full parse.error=detailed ... +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -127778,53 +128314,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1437: cat stderr -input: - | (* *) + (*) + (*) -./calc.at:1433: $PREPARSER ./calc input +./calc.at:1454: mv calc.y.tmp calc.y + +./calc.at:1454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y input: -stderr: -stderr: - | 1 2 input: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1432: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1432: $PREPARSER ./calc input | error -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1431: cat stderr +input: +./calc.at:1426: $PREPARSER ./calc input + | 1 2 +./calc.at:1437: cat stderr stderr: -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error -1.10: syntax error -1.16: syntax error +./calc.at:1438: $PREPARSER ./calc input stderr: +1.11-17: error: null divisor +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1432: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -127846,24 +128355,15 @@ Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: cat stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: +stderr: +syntax error + | (!!) + (1 2) = 1 +stderr: +./calc.at:1437: $PREPARSER ./calc input +1.11-17: error: null divisor stderr: -523. calc.at:1431: ok -524. 
calc.at:1432: ok -1.1: syntax error, unexpected invalid token stderr: -input: -./calc.at:1433: cat stderr Starting parse Entering state 0 Stack now 0 @@ -127884,13 +128384,10 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -input: - | - | +1 -./calc.at:1426: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1437: "$PERL" -pi -e 'use strict; +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -127900,13 +128397,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -stderr: -syntax error stderr: -./calc.at:1434: cat stderr -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: cat stderr +./calc.at:1432: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 ./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -127917,35 +128420,70 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: - -syntax error -./calc.at:1433: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1432: cat stderr ./calc.at:1438: cat stderr +./calc.at:1426: cat stderr +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1454: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +524. calc.at:1432: ok +./calc.at:1437: cat stderr input: input: - | 1 = 2 = 3 -./calc.at:1437: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1434: $PREPARSER ./calc input -input: -stderr: -1.7: syntax error, unexpected '=' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -input: -./calc.at:1433: $PREPARSER ./calc input -stderr: -1.7: syntax error | 1//2 -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1438: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1426: $PREPARSER ./calc input stderr: stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +input: +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./calc.at:1437: $PREPARSER ./calc input stderr: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -127974,7 +128512,21 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -1.7: syntax error, unexpected '=' +syntax error +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -127985,14 +128537,107 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: cat stderr +./calc.at:1437: cat stderr +540. calc.at:1455: testing Calculator C++ %locations parse.lac=full parse.error=detailed parse.trace ... +./calc.at:1455: mv calc.y.tmp calc.y + +input: +./calc.at:1426: cat stderr +./calc.at:1455: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | error +./calc.at:1438: $PREPARSER ./calc input +input: +input: +stderr: + | (* *) + (*) + (*) + | + | +1 +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1426: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error stderr: -538. calc.at:1453: testing Calculator C++ %locations parse.lac=full parse.error=verbose ... +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1453: mv calc.y.tmp calc.y - +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +stderr: +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: cat stderr +./calc.at:1437: cat stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: + | 1 = 2 = 3 +./calc.at:1438: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1455: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS ./calc.at:1426: cat stderr +./calc.at:1437: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Stack now 0 @@ -128007,20 +128652,83 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) Stack now 0 8 Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1433: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1426: $PREPARSER ./calc /dev/null +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -128030,7 +128738,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1434: "$PERL" -pi -e 'use strict; +input: +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -128040,6 +128749,74 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 + 2 * 3 + !- ++ +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1438: cat stderr +stderr: +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1426: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | () + (1 + 1 + 1 
+) + (* * *) + (1 * 2 * *) = 1 +stderr: +./calc.at:1426: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 ./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -128050,31 +128827,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1453: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1426: $PREPARSER ./calc /dev/null -stderr: syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 ./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1437: cat stderr -539. calc.at:1454: testing Calculator C++ %locations parse.lac=full parse.error=detailed ... -./calc.at:1454: mv calc.y.tmp calc.y - -./calc.at:1454: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: -stdout: syntax error -./calc.at:1440: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - +syntax error +syntax error +syntax error +error: 4444 != 1 ./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -128085,13 +128850,91 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1433: cat stderr input: - | - | +1 +./calc.at:1438: cat stderr + | (#) + (#) = 2222 ./calc.at:1437: $PREPARSER ./calc input +./calc.at:1438: $PREPARSER ./calc /dev/null +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1426: cat stderr +stderr: +./calc.at:1438: cat stderr +input: +stdout: +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + + | (!!) + (1 2) = 1 +./calc.at:1426: $PREPARSER ./calc input +stderr: +./calc.at:1437: cat stderr +syntax error +error: 2222 != 1 +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1434: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -128107,36 +128950,17 @@ | (2^2)^3 = 64 ./calc.at:1440: $PREPARSER ./calc input stderr: -./calc.at:1438: cat stderr input: input: -2.1: syntax error, unexpected '+' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1433: $PREPARSER ./calc input -stderr: - | - | +1 -./calc.at:1434: $PREPARSER ./calc input -2.1: syntax error, unexpected '+' +syntax error +error: 2222 != 1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1438: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1437: $PREPARSER ./calc input stderr: stderr: -input: - | error stderr: -2.1: syntax error -./calc.at:1454: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -129154,34 +129978,330 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) +1.6: syntax error: invalid character: '#' ./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -2.1: syntax error +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' 
(1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -129828,2965 +130948,378 @@ $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) -Entering state 1 -Stack now 0 6 1 
-Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 19 4 -Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) -Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 19 4 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Stack now 0 6 8 19 4 12 19 -Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) -Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Stack now 0 6 8 19 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> 
$$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Stack now 0 6 8 23 32 23 -Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 
(line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1426: cat stderr -./calc.at:1453: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1437: cat stderr -stderr: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1437: $PREPARSER ./calc /dev/null -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -input: - | 1 2 -./calc.at:1440: $PREPARSER ./calc input -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: $PREPARSER ./calc input -1.1: syntax error, unexpected end of input -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: cat stderr -stderr: -stderr: -1.1: syntax error, unexpected end of input -./calc.at:1438: cat stderr -./calc.at:1434: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1433: cat stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -stderr: -1.1: syntax error -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 = 2 = 3 -./calc.at:1438: $PREPARSER ./calc input -stderr: -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -1.1: syntax error - | (1 + #) = 1111 -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1437: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1437: $PREPARSER ./calc input -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -stderr: - | 1//2 -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1426: cat stderr -stderr: -1.6: syntax error: invalid character: '#' -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1434: cat stderr -stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -stderr: -stderr: -syntax error -error: 2222 != 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1438: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error -error: 2222 != 1 -./calc.at:1433: cat stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1437: cat stderr -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1438: $PREPARSER ./calc input -stderr: -input: -./calc.at:1440: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 - | (# + 1) = 1111 -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 - | (!!) + (1 2) = 1 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -input: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | error -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1426: cat stderr -stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -1.2: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1438: cat stderr - | (- *) + (1 2) = 1 -stderr: -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1438: $PREPARSER ./calc /dev/null -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: cat stderr -stderr: -syntax error -syntax error -error: 2222 != 1 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: cat stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1440: cat stderr -stderr: - | (!!) 
+ (1 2) = 1 -syntax error -syntax error -error: 2222 != 1 -./calc.at:1434: $PREPARSER ./calc input -input: -stderr: -./calc.at:1433: cat stderr -1.11: syntax error -1.1-16: error: 2222 != 1 -input: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | 1 = 2 = 3 -./calc.at:1440: $PREPARSER ./calc input -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -./calc.at:1438: cat stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 - | (1 + # + 1) = 1111 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: $PREPARSER ./calc input -stderr: -stderr: -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -1.6: syntax error: invalid character: '#' -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1438: $PREPARSER ./calc input -stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1440: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: cat stderr -./calc.at:1437: cat stderr -input: -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1426: $PREPARSER ./calc input -stderr: -input: - | - | +1 -./calc.at:1440: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -syntax error -syntax error -syntax error -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -input: - | (* *) + (*) + (*) - | (- *) + (1 2) = 1 -stderr: -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1434: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -syntax error -syntax error -syntax error -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1433: cat stderr -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: cat stderr -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -input: -input: -stderr: - | (1 + 1) / (1 - 1) - | (!!) + (1 2) = 1 -stderr: -./calc.at:1433: $PREPARSER ./calc input -./calc.at:1438: $PREPARSER ./calc input -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -stderr: -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.11-17: error: null divisor -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1433: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering 
state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: $PREPARSER ./calc /dev/null -1.11-17: error: null divisor -./calc.at:1437: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: cat stderr -./calc.at:1433: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: cat stderr -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1437: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1438: cat stderr -stderr: -input: - | (* *) + (*) + (*) -./calc.at:1434: $PREPARSER ./calc input -stderr: -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -stderr: -./calc.at:1433: cat stderr -input: -./calc.at:1437: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (- *) + (1 2) = 1 -stderr: -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1440: cat stderr -./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -525. 
calc.at:1433: ok -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 
0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !- ++ -stderr: -input: -./calc.at:1437: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a 
token -Next token is token '\n' (1.18-2.0: ) +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1440: $PREPARSER ./calc input -input: -stderr: -./calc.at:1434: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 8 20 4 +Stack now 0 6 8 19 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 6 8 19 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 -Stack now 0 8 20 4 12 +Stack now 0 6 8 19 4 12 Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) Entering state 1 -Stack now 0 8 20 4 12 20 1 +Stack now 0 6 8 19 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) +Next token is token ')' (10.11: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token '=' (10.13: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 -Stack now 0 8 20 4 12 20 
1 +Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 +Next token is token '\n' (10.16-11.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token 
'(' (1.32: ) -Shifting token '(' (1.32: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 -Stack now 0 8 20 4 +Stack now 0 6 4 Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 6 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 -Stack now 0 8 20 4 12 +Stack now 0 6 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) Entering state 1 -Stack now 0 8 20 4 12 21 1 +Stack now 0 6 4 12 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) +Next token is token ')' (13.5: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) +Next token is token '=' (13.9: ) +Reducing stack by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 6 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) 
Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | 1 + 2 * 3 + !- ++ -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1426: $PREPARSER ./calc input -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -./calc.at:1438: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -132796,12 +131329,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -input: -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -133119,7 +131648,7 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -133131,13 +131660,23 @@ ' expout || exit 77 stderr: ./calc.at:1438: cat stderr -stderr: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +input: +./calc.at:1426: cat stderr stdout: +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 2 +./calc.at:1440: $PREPARSER ./calc input ./calc.at:1437: cat stderr -stderr: -./calc.at:1434: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: ./calc.at:1441: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -133149,42 +131688,32 @@ || /\t/ )' calc.cc -input: -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) + | (!!) + (1 2) = 1 +stderr: ./calc.at:1438: $PREPARSER ./calc input -input: -540. calc.at:1455: testing Calculator C++ %locations parse.lac=full parse.error=detailed parse.trace ... 
-./calc.at:1455: mv calc.y.tmp calc.y - +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: - | 1 + 2 * 3 + !- ++ - | (#) + (#) = 2222 -./calc.at:1455: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -input: -./calc.at:1440: cat stderr -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1434: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -133194,132 +131723,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 20 4 -Shifting token error (1.10: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -133336,29 +131854,13 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1441: $PREPARSER ./calc input -stderr: + | (- *) + (1 2) = 1 +./calc.at:1426: $PREPARSER ./calc input stderr: -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -133368,132 +131870,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) Stack now 0 8 20 4 -Shifting token error (1.10: ) +Shifting token error (1.9-11: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token '=' (1.14: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -133510,9 +132001,60 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 + | (# + 1) = 1111 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1437: $PREPARSER ./calc input +syntax error +syntax error +error: 2222 != 1 +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -134531,13 +133073,7 @@ Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (!!) + (1 2) = 1 -./calc.at:1440: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -134547,147 +133083,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) 
-Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1438: cat stderr +syntax error +syntax error +error: 2222 != 1 +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1440: cat stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -135706,161 +134111,7 @@ Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Stack now 0 4 -Shifting token error (1.2-3: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1437: cat stderr -./calc.at:1438: cat stderr -./calc.at:1434: "$PERL" -pi -e 'use strict; +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -135871,7 +134122,9 @@ }eg ' expout || exit 77 input: -./calc.at:1440: "$PERL" -pi -e 'use strict; + | (- *) + (1 2) = 1 +./calc.at:1438: $PREPARSER ./calc input 
+./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -135881,238 +134134,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -./calc.at:1441: $PREPARSER ./calc input -input: -input: - | (#) + (#) = 2222 -./calc.at:1440: cat stderr - | (1 + #) = 1111 -./calc.at:1437: $PREPARSER ./calc input -./calc.at:1426: $PREPARSER ./calc input -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 - | 1 + 2 * 3 + !+ ++ -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1455: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: input: stderr: - | (- *) + (1 2) = 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -1.6: syntax error: invalid character: '#' -./calc.at:1440: $PREPARSER ./calc input -stderr: -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 
-Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: cat stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -136261,27 +134284,47 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 2 +./calc.at:1441: $PREPARSER ./calc input + | 1//2 +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1437: cat stderr stderr: -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -136426,37 +134469,8 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -input: - | (#) + (#) = 2222 -./calc.at:1434: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1441: cat stderr -stderr: -stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' +./calc.at:1426: cat stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -136471,92 +134485,20 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: cat stderr -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1//2 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1440: cat stderr -stderr: -stderr: input: + | (* *) + (*) + (*) +stderr: + | (1 + # + 1) = 1111 +./calc.at:1437: $PREPARSER ./calc input +./calc.at:1426: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -136585,101 +134527,6 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (# + 1) = 1111 -./calc.at:1426: cat stderr -./calc.at:1437: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: - | (* *) + (*) + (*) -./calc.at:1440: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | (1 + #) = 1111 ./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -136690,7 +134537,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1426: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -136706,20 +134555,60 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) +Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 +syntax error +syntax error +syntax error +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: cat stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error +syntax error +syntax error +1.6: syntax error: invalid character: '#' +./calc.at:1440: cat stderr +./calc.at:1441: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1437: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -136871,9 +134760,10 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1434: "$PERL" -pi -e 'use strict; +input: +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -136883,15 +134773,44 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' +./calc.at:1437: cat stderr + | error +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1440: $PREPARSER ./calc input +stderr: stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -137039,7 +134958,56 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1437: "$PERL" -pi -e 'use strict; +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -137049,7 +135017,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (1 + 1) / (1 - 1) +./calc.at:1437: $PREPARSER ./calc input +input: +stderr: +1.11-17: error: null divisor ./calc.at:1438: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: $PREPARSER ./calc input ./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -137060,16 +135036,115 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1434: cat stderr -./calc.at:1437: cat stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: ./calc.at:1441: cat stderr input: - | (#) + (#) = 2222 -input: +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ ./calc.at:1438: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1426: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1440: cat stderr +1.11-17: error: null divisor +stderr: +input: +./calc.at:1426: $EGREP -c -v 'Return for a new token:|LAC:' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | error +./calc.at:1441: $PREPARSER ./calc input +input: +stderr: +./calc.at:1437: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -137079,9 +135154,383 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1434: $PREPARSER ./calc input + | 1 = 2 = 3 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1438: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1440: $PREPARSER ./calc input +input: +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1426: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: input: +./calc.at:1437: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: $PREPARSER ./calc input +stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) 
+Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +528. calc.at:1437: ok stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -137092,9 +135541,143 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error +./calc.at:1441: cat stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: cat stderr +input: +./calc.at:1438: cat stderr + | 1 = 2 = 3 ./calc.at:1441: $PREPARSER ./calc input + +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: cat stderr +input: +input: + | + | +1 +./calc.at:1440: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1438: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +Starting parse 
+Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 input: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (#) + (#) = 2222 +./calc.at:1426: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -137218,33 +135801,48 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + # + 1) = 1111 -./calc.at:1437: $PREPARSER ./calc input stderr: ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -./calc.at:1426: cat stderr -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: cat stderr stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) Stack now 0 -1.6: syntax error: invalid character: '#' -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: -stderr: - | (# + 1) = 1111 -./calc.at:1426: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -137368,26 +135966,11 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.6: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -input: -1.6: syntax error: invalid character: '#' - | 1 + 2 * 3 + !+ ++ +./calc.at:1441: cat stderr stderr: syntax error: invalid character: '#' -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: $PREPARSER ./calc input -stderr: syntax error: invalid character: '#' -./calc.at:1438: "$PERL" -pi -e 'use strict; +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -137397,8 +135980,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1437: "$PERL" -pi -e 'use strict; +input: +541. calc.at:1457: testing Calculator C++ parse.error=custom ... 
+./calc.at:1457: mv calc.y.tmp calc.y + +./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -137408,110 +135994,40 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1440: cat stderr + | + | +1 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1438: cat stderr +stderr: +./calc.at:1440: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1437: cat stderr -stderr: -./calc.at:1438: cat stderr +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -137522,197 +136038,57 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1441: cat stderr -./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1437: $PREPARSER ./calc input stderr: -./calc.at:1434: cat stderr -1.11-17: error: null divisor -input: -./calc.at:1437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1426: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1438: $PREPARSER ./calc input - | 1 + 2 * 3 + !- ++ -./calc.at:1440: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1441: $PREPARSER ./calc input stderr: input: -input: -stderr: -stderr: -stderr: - | (1 + # + 1) = 1111 -./calc.at:1426: $PREPARSER ./calc input -1.11-17: error: null divisor Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 + | (1 + #) = 1111 +./calc.at:1438: $PREPARSER ./calc input ./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' +./calc.at:1426: cat stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -137812,67 +136188,17 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (# + 1) = 1111 -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1434: $PREPARSER ./calc input ./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) Stack now 0 -./calc.at:1437: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -syntax error: invalid character: '#' -./calc.at:1437: cat stderr -stderr: -stderr: Starting parse Entering state 0 Stack now 0 @@ -137972,138 +136298,11 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -1.2: syntax error: invalid character: '#' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -528. calc.at:1437: ./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - ok +./calc.at:1441: cat stderr +input: + | (1 + #) = 1111 +./calc.at:1441: $PREPARSER ./calc /dev/null +./calc.at:1426: $PREPARSER ./calc input ./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -138114,7 +136313,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: ./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -138125,47 +136323,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error: invalid character: '#' -./calc.at:1440: cat stderr -./calc.at:1426: cat stderr +stderr: +stderr: +syntax error: invalid character: '#' +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1438: cat stderr +./calc.at:1440: cat stderr +./calc.at:1457: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS input: -./calc.at:1441: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1426: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1438: $PREPARSER ./calc input stderr: -input: -error: null divisor -./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1440: $PREPARSER ./calc input stderr: -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -error: null divisor -input: -input: - +syntax error: invalid character: '#' stderr: - | (# + 1) = 1111 Starting parse Entering state 0 Stack now 0 @@ -138188,91 +136368,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> 
$$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -138289,12 +136442,18 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1438: $PREPARSER ./calc input -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1441: $PREPARSER ./calc input +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +input: stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 Starting parse Entering state 0 Stack now 0 @@ -138391,9 +136550,52 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1440: $PREPARSER ./calc input stderr: -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +stdout: +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: cat stderr +./calc.at:1445: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + Starting parse Entering state 0 Stack now 0 @@ -138403,104 +136605,298 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 20 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token 
is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.30: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token 
'*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -138517,34 +136913,9 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 
-Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1438: cat stderr stderr: -./calc.at:1434: cat stderr Starting parse Entering state 0 Stack now 0 @@ -138554,77 +136925,298 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number 
or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) 
+Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) Entering state 25 -Stack now 0 4 11 25 +Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -138641,99 +137233,29 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1426: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 input: - | (1 + # + 1) = 1111 -./calc.at:1434: $PREPARSER ./calc input -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1440: cat stderr -541. calc.at:1457: testing Calculator C++ parse.error=custom ... -./calc.at:1457: mv calc.y.tmp calc.y - -./calc.at:1457: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -./calc.at:1426: cat stderr -./calc.at:1438: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1441: cat stderr input: - | (1 + #) = 1111 +./calc.at:1426: cat stderr input: -./calc.at:1440: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1445: $PREPARSER ./calc input | (1 + # + 1) = 1111 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1438: $PREPARSER ./calc input -522. 
calc.at:1426: ok -./calc.at:1441: $PREPARSER ./calc /dev/null -stderr: -stderr: -./calc.at:1434: "$PERL" -pi -e 'use strict; +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -138744,115 +137266,8 @@ }eg ' expout || exit 77 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Stack now 0 @@ -138969,527 +137384,1024 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1438: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token '=' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (7) +Shifting token number (7) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (7) + $2 
= token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 4 1 +Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 6 8 20 29 21 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '=' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp 
(-6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (5) +Shifting token number (5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (1) +Shifting token 
number (1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1434: cat stderr -./calc.at:1457: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1438: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: cat stderr -./calc.at:1441: cat stderr -./calc.at:1438: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1434: $PREPARSER ./calc input -stderr: -input: -input: -1.11-17: error: null divisor -input: - | (1 + 1) / (1 - 1) - | (# + 1) = 1111 -./calc.at:1440: $PREPARSER ./calc input -./calc.at:1434: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1438: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1441: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 +Stack now 0 6 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 4 2 10 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token ')' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + 
$1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 6 8 18 2 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 27 +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '\n' 
(1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' () +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' () +Reducing stack by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4) +Shifting token number (4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: 
) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 4 +Stack now 0 6 8 19 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 4 1 +Stack now 0 6 8 19 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 4 12 +Stack now 0 6 8 19 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 4 12 20 1 +Stack now 0 6 8 19 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) +Next token is token ')' () +Reducing stack by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Stack now 0 6 8 19 4 12 +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 4 12 26 +Stack now 0 6 8 19 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' () +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 
8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (256) +Shifting token number (256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (256) +-> $$ = nterm exp (256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 22 4 +Stack now 0 6 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 22 4 1 +Stack now 0 6 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 22 4 12 +Stack now 0 6 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 
12 19 +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 4 12 23 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 22 4 12 19 1 +Stack now 0 6 4 12 23 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token ')' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Stack now 0 6 4 12 +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 8 22 4 12 26 +Stack now 0 6 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (64) +Shifting token number (64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (64) +-> $$ = nterm exp (64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input 
(2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -stderr: -542. calc.at:1458: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +Cleanup: popping token end of input () +Cleanup: popping nterm input () Starting parse Entering state 0 Stack now 0 @@ -139807,9 +138719,13 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1458: mv calc.y.tmp calc.y - ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1440: cat stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: $PREPARSER ./calc input +stderr: +stderr: stderr: stderr: Starting parse @@ -139821,77 +138737,96 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.1-4: ) +Shifting token error (1.2-8: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) Stack now 0 4 -Shifting token error (1.1-6: ) +Shifting token error (1.2-10: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' 
(1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -139908,149 +138843,1026 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '=' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '=' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (7) +Shifting token number (7) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (7) +-> $$ = nterm exp (7) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 
(line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 +Stack now 0 6 8 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 4 12 20 1 +Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 -Stack now 0 4 12 20 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 6 8 20 29 21 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 8 20 29 21 2 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (5) +Shifting token number (5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () 
+ $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 2 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 2 10 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Stack now 0 6 2 10 23 32 +Reading a token +Next token is token '=' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 4 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 4 2 10 +Reading a token +Next token is token ')' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Stack now 0 6 4 12 +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 4 12 26 +Stack now 0 6 4 12 26 Reducing stack by rule 
13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 2 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 2 2 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 2 2 2 10 +Reading a token +Next token is token '=' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is 
token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' () +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' () +Reducing stack by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4) +Shifting token number (4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 79): + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 +Stack now 0 6 8 18 2 10 +Reading a token +Next token is token '\n' () +Reducing stack by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 
8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 22 4 +Stack now 0 6 8 19 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Stack now 0 8 22 4 1 +Stack now 0 6 8 19 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 22 4 12 +Stack now 0 6 8 19 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) +Next token is token '-' () +Shifting token '-' () Entering state 19 -Stack now 0 8 22 4 12 19 +Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 8 22 4 12 19 1 +Stack now 0 6 8 19 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 28 -Stack now 0 8 22 4 12 19 28 +Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token ')' (1.17: ) +Next token is token ')' () Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Stack now 0 6 8 19 4 12 +Next token is token ')' () +Shifting token ')' () Entering state 26 -Stack now 0 8 22 4 12 26 +Stack now 0 6 8 19 4 12 26 Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 +Stack now 0 6 8 19 28 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '=' () +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next 
token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Stack now 0 8 24 +Stack now 0 6 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 8 23 32 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 +Reading a token +Next token is token '=' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (256) +Shifting token number (256) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (256) +-> $$ = nterm exp (256) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '(' () +Shifting token '(' 
() +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' () +Reducing stack by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (64) +Shifting token number (64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (64) +-> $$ = nterm exp (64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -140368,16 +140180,10 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1434: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: + | (!!) + (1 2) = 1 +./calc.at:1440: $PREPARSER ./calc input +stderr: ./calc.at:1438: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -140388,16 +140194,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -140408,20 +140204,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1440: cat stderr -./calc.at:1438: cat stderr -./calc.at:1441: cat stderr -./calc.at:1434: cat stderr -529. calc.at:1438: ok -input: - | (1 + # + 1) = 1111 -./calc.at:1440: $PREPARSER ./calc input -input: +syntax error: invalid character: '#' stderr: -526. calc.at:1434: ok - | (!!) + (1 2) = 1 -./calc.at:1458: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1441: $PREPARSER ./calc input +input: Starting parse Entering state 0 Stack now 0 @@ -140431,96 +140216,121 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) Stack now 0 4 -Shifting token error (1.2-10: ) +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -140538,9 +140348,33 @@ Cleanup: popping token end of input (2.1: ) Cleanup: 
popping nterm input (1.1-2.0: ) ./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1438: cat stderr +./calc.at:1441: cat stderr stderr: - +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Stack now 0 +Cleanup: discarding lookahead token number (2) +Stack now 0 stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -140681,6 +140515,10 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | (1 + 1) / (1 - 1) +./calc.at:1438: $PREPARSER ./calc input +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -140705,81 +140543,107 @@ Entering state 20 Stack now 0 4 12 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 
-Stack now 0 8 18 1 +Stack now 0 8 22 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -140796,8 +140660,52 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +./calc.at:1438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: + | (!!) + (1 2) = 1 +./calc.at:1441: $PREPARSER ./calc input +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Stack now 0 +Cleanup: discarding lookahead token number (2) +Stack now 0 +stderr: stderr: Starting parse Entering state 0 @@ -140939,69 +140847,7 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1440: cat stderr -./calc.at:1441: cat stderr -543. calc.at:1459: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} parse.lac=full ... -stderr: -./calc.at:1459: mv calc.y.tmp calc.y - -stdout: -input: -./calc.at:1459: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y - | (1 + 1) / (1 - 1) -./calc.at:1435: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -input: -./calc.at:1440: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1441: $PREPARSER ./calc input -stderr: -stderr: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1435: $PREPARSER ./calc input -stderr: +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -141143,8 +140989,188 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: cat stderr +./calc.at:1426: cat stderr +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1438: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (- *) + (1 2) = 1 +./calc.at:1440: $PREPARSER ./calc input +input: + | (1 + # + 1) = 1111 +./calc.at:1445: cat stderr +./calc.at:1438: cat stderr +stderr: +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -141293,9 +141319,18 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +529. calc.at:1438: ok +./calc.at:1441: cat stderr stderr: +input: stderr: + | 1//2 +./calc.at:1445: $PREPARSER ./calc input +syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -141305,113 +141340,679 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Stack now 0 4 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 
0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Stack now 0 8 +Error: popping nterm exp (1) +Stack now 0 +Cleanup: discarding lookahead token '/' () +Stack now 0 +input: + | (- *) + (1 2) = 1 +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: $PREPARSER ./calc input + +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 -Stack now 0 4 12 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 4 12 20 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' 
(1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 29 -Stack now 0 4 12 20 29 +Stack now 0 8 20 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token '=' (1.15: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1426: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' () +Shifting token '/' () Entering state 22 Stack now 0 8 22 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Stack now 0 8 +Error: popping nterm exp (1) +Stack now 0 +Cleanup: discarding lookahead token '/' () +Stack now 0 +./calc.at:1440: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 22 4 +Stack now 0 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 8 22 4 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Stack now 0 8 22 4 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 
(line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1426: cat stderr + | (* *) + (*) + (*) +./calc.at:1440: $PREPARSER ./calc input +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +./calc.at:1445: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -141419,7 +142020,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -141437,11 +142038,19 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr -544. calc.at:1468: testing Calculator glr.cc ... -./calc.at:1468: mv calc.y.tmp calc.y +542. 
calc.at:1458: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1458: mv calc.y.tmp calc.y +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1441: cat stderr +./calc.at:1426: $PREPARSER ./calc input +./calc.at:1458: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +stderr: +error: null divisor +input: +./calc.at:1426: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -141451,26 +142060,22 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) Stack now 0 4 -Shifting token error (1.2-4: ) +Shifting token error (1.2: ) Entering state 11 Stack now 0 4 11 +Reading a token Next token is token '*' (1.4: ) Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) +Error: popping token error (1.2: ) Stack now 0 4 Shifting token error (1.2-4: ) Entering state 11 @@ -141498,73 +142103,80 @@ Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) Stack now 0 8 20 4 -Shifting token error (1.10-12: ) +Shifting token error (1.10: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.15: ) +Next token is token '+' (1.13: ) Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1-5: 1111) $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -141572,7 +142184,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -141590,43 +142202,26 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1468: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1440: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | error input: - | 1 2 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1440: cat stderr -stderr: -./calc.at:1459: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1441: cat stderr -1.3: syntax error, unexpected number -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -530. calc.at:1440: ok +./calc.at:1445: $PREPARSER ./calc input stderr: -1.3: syntax error, unexpected number -input: | (* *) + (*) + (*) ./calc.at:1441: $PREPARSER ./calc input -./calc.at:1435: "$PERL" -pi -e 'use strict; +stderr: +stdout: +stderr: +error: null divisor +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +Stack now 0 +stderr: +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141636,7 +142231,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +./calc.at:1435: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc calc.hh + Starting parse Entering state 0 Stack now 0 @@ -141789,10 +142394,33 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1435: cat stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: cat stderr +input: stderr: -./calc.at:1468: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +stderr: +./calc.at:1435: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +Stack now 0 Starting parse Entering state 0 Stack now 0 @@ -141944,26 +142572,8 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: - | 1//2 -./calc.at:1435: $PREPARSER ./calc input -stdout: stderr: -./calc.at:1454: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1426: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -141973,44 +142583,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1440: $PREPARSER ./calc input stderr: -./calc.at:1441: cat stderr -545. calc.at:1469: testing Calculator glr2.cc ... -./calc.at:1469: mv calc.y.tmp calc.y - - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1454: $PREPARSER ./calc input -./calc.at:1469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -stderr: -stdout: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -142020,28 +142598,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !+ ++ -stderr: -./calc.at:1441: $PREPARSER ./calc input -input: -./calc.at:1435: cat stderr -./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1445: $PREPARSER ./calc input stderr: +./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -142122,1037 +142680,565 @@ Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1426: cat stderr +./calc.at:1441: cat stderr stderr: +./calc.at:1445: cat stderr +522. calc.at:1426: ok input: -input: - | error +./calc.at:1458: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' () +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '=' () +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = 
nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1440: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | 1 2 +./calc.at:1435: $PREPARSER ./calc input +input: +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1441: $PREPARSER ./calc input +input: +1.3: syntax error, unexpected number +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1445: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 -Stack now 0 6 8 20 +Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 20 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 -Stack now 0 6 8 20 29 +Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 -Stack now 0 6 8 20 29 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 8 20 29 21 2 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 20 29 21 2 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) 
-Entering state 10 -Stack now 0 6 8 20 29 21 2 10 -Reading a token -Next token is token '=' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' () +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' () +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (5) -Shifting token number (5) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 2 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1440: $PREPARSER ./calc input +stderr: +stderr: +1.3: syntax error, unexpected number +Starting parse +Entering state 0 +Stack now 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 6 2 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 2 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Stack now 0 6 2 10 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Stack now 0 6 2 10 23 32 -Reading a token -Next token is token '=' () -Reducing stack by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 4 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering 
state 23 -Stack now 0 6 8 23 +Stack now 0 8 18 Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Stack now 0 6 8 23 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 32 -Stack now 0 6 8 23 32 +Entering state 27 +Stack now 0 8 18 27 Reading a token Next token is token '=' () -Reducing stack by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 6 8 18 +syntax error, unexpected '=' +Error: popping nterm exp (2) +Stack now 0 8 18 +Error: popping token '=' () +Stack now 0 8 +Error: popping nterm exp (1) +Stack now 0 +Cleanup: discarding lookahead token '=' () +Stack now 0 +stderr: +stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 8 18 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 2 2 +Stack now 0 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 2 2 2 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 2 2 2 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm 
exp (-1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 6 8 18 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 8 18 2 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 18 2 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 8 18 2 10 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Stack now 0 6 8 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '-' () -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' () -Reducing stack by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 8 18 2 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Stack now 0 6 8 18 2 10 +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '\n' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Stack now 0 6 8 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 6 8 19 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Stack now 0 6 8 19 4 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Stack now 0 6 8 19 4 12 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 -Reading a token -Next token is token ')' () -Reducing stack by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Stack now 0 6 8 19 28 +Stack now 0 8 Reading a token Next token is token '=' () -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Stack now 0 6 8 18 1 +Stack now 0 8 
18 1 Reducing stack by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Stack now 0 6 8 23 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Stack now 0 6 8 23 32 23 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 +Stack now 0 8 18 27 Reading a token Next token is token '=' () -Reducing stack by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' () -Reducing stack by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 6 8 18 +syntax error, unexpected '=' +Error: popping nterm exp (2) +Stack now 0 8 18 +Error: popping token '=' () +Stack now 0 8 +Error: popping nterm exp (1) +Stack now 0 +Cleanup: discarding lookahead token '=' () +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 8 18 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 -Stack now 0 6 8 18 27 -Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> 
$$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Stack now 0 6 4 12 +Stack now 0 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Stack now 0 6 4 12 23 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 4 12 23 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Stack now 0 6 4 12 23 32 -Reading a token -Next token is token ')' () -Reducing stack by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Stack now 0 6 8 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Stack now 0 6 8 23 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 23 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Stack now 0 6 8 23 32 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' () -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (64) -Shifting token number (64) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) -Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) -Entering state 8 -Stack now 0 6 8 -Next 
token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1435: $PREPARSER ./calc input - | 1 2 -stderr: -./calc.at:1454: $PREPARSER ./calc input -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1435: cat stderr +./calc.at:1441: $PREPARSER ./calc input stderr: +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -143223,110 +143309,303 @@ Entering state 5 Stack now 0 8 20 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $2 = token '-' (1.14: ) Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -1.3: syntax error, unexpected number -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1//2 +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1445: cat stderr +543. calc.at:1459: testing Calculator C++ parse.error=custom %locations api.prefix={calc} %parse-param {semantic_value *result}{int *count}{int *nerrs} parse.lac=full ... 
+./calc.at:1440: cat stderr +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' () +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '=' () +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1459: mv calc.y.tmp calc.y + +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1459: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | + | +1 +input: +./calc.at:1445: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (#) + (#) = 2222 +./calc.at:1440: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Stack now 0 +Cleanup: discarding lookahead token '+' () +Stack now 0 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +stderr: +./calc.at:1441: cat stderr +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 
(line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' () -> $$ = nterm line () Entering state 7 Stack now 0 7 @@ -143336,913 +143615,1747 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Stack now 0 +Cleanup: discarding lookahead token '+' () +Stack now 0 +stderr: +input: +./calc.at:1435: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 20 -Stack now 0 6 8 20 +Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Stack now 0 6 8 20 29 +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '*' () -Shifting 
token '*' () -Entering state 21 -Stack now 0 6 8 20 29 21 +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 8 20 29 21 2 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 6 8 20 29 21 2 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '=' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' () -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (#) + (#) = 2222 +./calc.at:1441: $PREPARSER ./calc input +stderr: +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' () +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 8 18 2 +Stack now 0 8 18 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 6 8 18 2 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' () +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering 
state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | error +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1440: cat stderr +stderr: +./calc.at:1445: cat stderr +stderr: +1.1: syntax error, unexpected invalid token +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) 
+-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 2 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1445: $PREPARSER ./calc /dev/null +stderr: +input: + | (1 + #) = 1111 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token end of input () +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +Stack now 0 +./calc.at:1440: $PREPARSER ./calc input +stderr: +./calc.at:1459: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +1.1: syntax error, unexpected invalid token +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 2 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 2 10 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Stack now 0 6 2 10 23 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token number (2) -Shifting token number (2) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 2 10 23 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Stack now 0 6 2 10 23 32 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '=' () -Reducing stack by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 8 18 2 +Next token is token end of input (2.1: ) +Shifting 
token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token end of input () +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +Stack now 0 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 8 18 2 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 8 18 2 10 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '\n' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' () +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering 
state 6 Stack now 0 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: cat stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: cat stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1441: $PREPARSER ./calc input +stderr: +input: +./calc.at:1440: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 4 2 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 4 2 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 4 2 10 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token ')' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> 
$$ = nterm exp (-1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Stack now 0 6 8 23 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: cat stderr + | 1 = 2 = 3 +./calc.at:1435: $PREPARSER ./calc input +stderr: +stderr: +input: +1.7: syntax error, unexpected '=' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 6 8 23 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Stack now 0 6 8 23 32 + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '=' () -Reducing stack by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.11-14: 1111) +Shifting token number 
(1.11-14: 1111) Entering state 1 -Stack now 0 6 8 18 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1445: $PREPARSER ./calc input + | (# + 1) = 1111 +./calc.at:1440: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 2 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 2 2 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 2 2 2 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 2 2 2 10 +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token 
-Next token is token '=' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 8 18 2 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 18 2 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' () +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token 
'(' () +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 6 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Stack now 0 6 8 +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Stack now 0 6 8 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 Reading a token -Next token is token '-' () -Reducing stack by rule 8 (line 91): +Next token is token '+' () +Reducing stack by rule 7 (line 90): $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Stack now 0 6 8 19 + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) -> $$ = nterm exp (3) -Entering state 28 -Stack now 0 6 8 19 28 +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token '=' () -Reducing stack by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' () +Stack now 0 8 20 4 12 +Error: popping nterm exp (3) +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 6 8 18 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Stack now 0 6 8 18 2 10 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '\n' () -Reducing stack by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 6 1 +Stack now 0 8 20 4 1 Reducing stack by 
rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Stack now 0 6 8 19 +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 6 8 19 4 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Stack now 0 6 8 19 4 1 +Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 12 -Stack now 0 6 8 19 4 12 +Entering state 30 +Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Stack now 0 6 8 19 4 12 19 +Next token is token '*' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Stack now 0 8 20 4 12 +Error: popping nterm exp (2) +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 Reading a token Next token is token ')' () -Reducing stack by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' () Shifting token ')' () -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Stack now 0 6 8 19 28 +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '=' () -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 18 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (2) + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $3 = nterm exp (1) 
+error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 -Stack now 0 6 8 +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +1.7: syntax error, unexpected '=' +stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Stack now 0 6 8 23 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Stack now 0 6 8 23 32 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Stack now 0 6 8 23 32 23 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' () -Reducing stack by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' () -Reducing stack by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack 
now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 6 8 18 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1441: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 -Stack now 0 6 4 +Stack now 0 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 4 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 -Stack now 0 6 4 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Stack now 0 6 4 12 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 4 12 23 1 +Stack now 0 8 20 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) -> $$ = nterm exp (2) -Entering state 32 -Stack now 0 6 4 12 23 32 +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 4 12 20 Reading a token -Next token is token ')' () -Reducing stack by rule 12 (line 103): +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' () +Reducing stack by rule 7 (line 90): $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) Entering state 12 -Stack now 0 6 4 12 +Stack now 0 8 20 4 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' () +Stack now 0 8 20 4 12 +Error: popping nterm exp (3) +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (4) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (4) +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Stack now 0 6 8 23 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 23 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 -Stack now 0 6 8 23 32 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack 
now 0 8 20 4 12 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Stack now 0 8 20 4 12 +Error: popping nterm exp (2) +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token Next token is token '=' () -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 6 8 18 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (64) + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (64) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Stack now 0 6 @@ -144254,100 +145367,8 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -stderr: -1.1: syntax error, unexpected invalid token -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1441: $PREPARSER ./calc input -input: -1.3: syntax error, unexpected number - | 1 2 -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 
8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1441: cat stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -144358,29 +145379,28 @@ }eg ' expout || exit 77 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token number (2) -Stack now 0 -./calc.at:1469: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1435: cat stderr +./calc.at:1440: cat stderr +stdout: +input: +./calc.at:1453: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + + | (# + 1) = 1111 +./calc.at:1441: $PREPARSER ./calc input +input: stderr: -./calc.at:1454: "$PERL" -pi -e 'use strict; +input: + | (1 + # + 1) = 1111 +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -144390,152 +145410,130 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1440: $PREPARSER ./calc input +input: + | + | +1 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Stack now 0 8 20 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: cat stderr -input: -./calc.at:1454: cat stderr -stderr: - | 1 = 2 = 3 -./calc.at:1435: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token number (2) -Stack now 0 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1//2 -stderr: -./calc.at:1441: cat stderr -./calc.at:1454: $PREPARSER ./calc input -1.7: syntax error, unexpected '=' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -input: -1.7: syntax error, unexpected '=' - | (#) + (#) = 2222 stderr: -./calc.at:1441: $PREPARSER ./calc input -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' ./calc.at:1445: cat stderr +2.1: syntax error, unexpected '+' stderr: +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -144558,91 +145556,179 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 -Stack now 0 8 20 4 +Stack now 0 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token 
+Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -144659,7 +145745,9 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1435: 
"$PERL" -pi -e 'use strict; +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -144669,22 +145757,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: input: -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1435: cat stderr - | 1//2 -./calc.at:1445: $PREPARSER ./calc input +2.1: syntax error, unexpected '+' +./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (!!) + (1 2) = 1 stderr: +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1441: cat stderr Starting parse Entering state 0 Stack now 0 @@ -144694,104 +145774,96 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) Stack now 0 4 -Shifting token error (1.1-2: ) +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token 
-Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -144809,91 +145881,148 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1454: cat stderr +input: Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Stack now 0 8 22 +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token '/' () -Stack now 0 -input: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1441: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1435: $PREPARSER ./calc input -stderr: -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Stack now 0 8 -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token '/' () -Stack now 0 - | error -./calc.at:1454: $PREPARSER ./calc input -2.1: syntax error, unexpected '+' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1441: cat stderr -stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | (1 + #) = 1111 -./calc.at:1445: "$PERL" -pi -e 'use strict; +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -144903,11 +146032,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (1 + # + 1) = 1111 ./calc.at:1441: $PREPARSER ./calc input -2.1: syntax error, unexpected '+' -stderr: +input: stderr: -1.1: syntax error, unexpected invalid token Starting parse Entering state 0 Stack now 0 @@ -144949,48 +146077,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' 
(1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -145007,9 +146151,12 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1453: $PREPARSER ./calc input ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: cat stderr -./calc.at:1435: "$PERL" -pi -e 'use strict; +./calc.at:1435: cat stderr +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -145020,6 +146167,148 @@ }eg ' expout || exit 77 stderr: +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () Starting parse Entering state 0 Stack now 0 @@ -145061,48 +146350,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' 
(1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -145119,7 +146424,15 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1454: "$PERL" -pi -e 'use strict; +1.3: syntax error, unexpected number +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: $PREPARSER ./calc /dev/null +./calc.at:1440: cat stderr +stderr: +1.1: syntax error, unexpected end of input +stderr: +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -145129,22 +146442,178 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.3: syntax error, unexpected number input: -./calc.at:1435: cat stderr - | error -./calc.at:1445: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) stderr: -./calc.at:1454: cat stderr -./calc.at:1435: $PREPARSER ./calc /dev/null +./calc.at:1440: $PREPARSER ./calc input +1.1: syntax error, unexpected end of input +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1441: cat stderr +stderr: +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -Stack now 0 -./calc.at:1441: "$PERL" -pi -e 'use strict; +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> 
$$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1445: cat stderr +./calc.at:1440: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -145154,113 +146623,431 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error, unexpected end of input -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: cat stderr input: -./calc.at:1441: cat stderr -stderr: - | 1 = 2 = 3 -./calc.at:1454: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -Stack now 0 -stderr: -1.7: syntax error, unexpected '=' +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) 
+Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: -1.1: syntax error, unexpected end of input -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 + | (1 + 1) / (1 - 1) ./calc.at:1441: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1445: $PREPARSER ./calc input stderr: stderr: +./calc.at:1435: cat stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Stack now 0 4 -Shifting token error (1.1-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (2222) + $2 = token 
'\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 
75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -145277,19 +147064,13 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: ./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error, unexpected '=' -./calc.at:1445: "$PERL" -pi -e 'use strict; + | 1//2 +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1440: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -145300,86 +147081,280 @@ }eg ' expout || exit 77 stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Stack now 0 4 -Shifting token error (1.1-4: ) +Shifting token error () Entering state 11 Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () Stack now 0 4 -Shifting token error (1.1-6: ) +Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (2) +Error: discarding token number (2) +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' () +Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (2222) + $2 = token 
'\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 
75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -145396,11 +147371,23 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1440: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1435: $PREPARSER ./calc input stderr: -stdout: -./calc.at:1435: cat stderr -./calc.at:1445: cat stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; +stderr: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +530. calc.at:1440: ok +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1441: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -145410,18 +147397,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1443: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh +stderr: +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1441: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: cat stderr +531. 
calc.at:1441: ok +./calc.at:1453: cat stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -145432,78 +147439,392 @@ }eg ' expout || exit 77 input: -./calc.at:1454: cat stderr - | 1 = 2 = 3 -./calc.at:1441: cat stderr +./calc.at:1435: cat stderr + | (* *) + (*) + (*) ./calc.at:1445: $PREPARSER ./calc input +stderr: +stderr: input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1443: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () input: +stdout: + | error +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + + | (!!) + (1 2) = 1 +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1435: $PREPARSER ./calc input +stderr: +stderr: + +1.1: syntax error, unexpected invalid token +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Stack now 0 8 18 -Error: popping token '=' () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token '=' () -Stack now 0 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1435: $PREPARSER ./calc input +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' () +Error: discarding token '*' () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () input: stderr: - | - | +1 -./calc.at:1454: $PREPARSER ./calc input -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -2.1: syntax error, unexpected '+' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1443: $PREPARSER ./calc input +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +1.1: syntax error, unexpected invalid token stderr: +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -146521,59 +148842,16 @@ Stack now 0 6 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -input: +544. calc.at:1468: testing Calculator glr.cc ... +./calc.at:1468: mv calc.y.tmp calc.y + ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.1-46: error: 4444 != 1 -stderr: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1441: $PREPARSER ./calc input +./calc.at:1453: cat stderr +./calc.at:1435: cat stderr +./calc.at:1445: cat stderr +./calc.at:1468: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Stack now 0 8 18 -Error: popping token '=' () -Stack now 0 8 -Error: popping nterm exp (1) -Stack now 0 -Cleanup: discarding lookahead token '=' () -Stack now 0 +input: Starting parse Entering state 0 Stack now 0 @@ -147592,143 +149870,126 @@ Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) ./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | 1 = 2 = 3 +input: +input: +./calc.at:1453: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1445: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1435: $PREPARSER ./calc input +545. calc.at:1469: testing Calculator glr2.cc ... 
+stderr: stderr: stderr: +1.7: syntax error, unexpected '=' +stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1469: mv calc.y.tmp calc.y + Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token 
'+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Stack now 0 8 20 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stdout: | 1 2 ./calc.at:1443: $PREPARSER ./calc input +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1469: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1454: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -147749,125 +150010,95 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1454: cat stderr +1.7: syntax error, unexpected '=' +stderr: +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 4 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = 
nterm exp (7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Stack now 0 8 20 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -147888,19 +150119,8 @@ Stack now 0 Cleanup: discarding lookahead token number (1.3: 2) Stack now 0 -./calc.at:1454: $PREPARSER ./calc /dev/null -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; +input: +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -147910,8 +150130,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error, unexpected end of file -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -147921,10 +150140,109 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1454: $PREPARSER ./calc input +stderr: +input: +./calc.at:1435: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1453: cat stderr ./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: $PREPARSER ./calc input stderr: -./calc.at:1445: cat stderr -./calc.at:1441: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token 
is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Stack now 0 8 20 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -147934,15 +150252,119 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error, unexpected end of file -./calc.at:1435: cat stderr -./calc.at:1443: cat stderr +stderr: +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1441: cat stderr +./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: + | (* *) + (*) + (*) +stderr: | | +1 -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1454: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' () +Reducing stack by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Stack now 0 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' 
() + $2 = token '-' () +Stack now 0 8 20 +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1453: $PREPARSER ./calc input +./calc.at:1443: cat stderr +./calc.at:1469: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +stderr: +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +2.1: syntax error, unexpected '+' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | 1//2 +./calc.at:1443: $PREPARSER ./calc input +input: +2.1: syntax error, unexpected '+' +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' + | 1 2 +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -147952,41 +150374,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1//2 -input: -./calc.at:1443: $PREPARSER ./calc input -stderr: +./calc.at:1454: $PREPARSER ./calc input stderr: - | (!!) + (1 2) = 1 -./calc.at:1435: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Stack now 0 -Cleanup: discarding lookahead token '+' () -Stack now 0 stderr: -./calc.at:1454: cat stderr +1.3: syntax error, unexpected number Starting parse Entering state 0 Stack now 0 @@ -148014,14 +150405,30 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -input: +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1441: $PREPARSER ./calc input -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1445: cat stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -148050,33 +150457,7 @@ Stack now 0 Cleanup: discarding lookahead token '/' (1.3: ) Stack now 0 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Stack now 0 -Cleanup: discarding lookahead token '+' () -Stack now 0 +./calc.at:1453: cat stderr ./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -148087,339 +150468,151 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) 
-Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1435: cat stderr +1.3: syntax error, unexpected number input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1454: $PREPARSER ./calc input +./calc.at:1453: $PREPARSER ./calc /dev/null ./calc.at:1443: cat stderr + | (#) + (#) = 2222 +./calc.at:1445: $PREPARSER ./calc input +input: +stderr: +1.1: syntax error, unexpected end of input stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 
(line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 -Stack now 0 8 22 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 8 20 4 +Shifting token error () +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token '=' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (2222) +Shifting token number (2222) Entering state 1 -Stack now 0 8 22 4 12 19 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 + $1 = token number (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token 
'\n' () +-> $$ = nterm line () Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) +Next token is token end of input () +Shifting token end of input () Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: +Cleanup: popping token end of input () +Cleanup: popping nterm input () | error ./calc.at:1443: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1435: $PREPARSER ./calc input +1.1: syntax error, unexpected end of input stderr: -stderr: -./calc.at:1435: cat stderr -./calc.at:1445: cat stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -stdout: -./calc.at:1441: "$PERL" -pi -e 'use strict; +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -148429,39 +150622,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1451: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: $PREPARSER ./calc /dev/null -stderr: -./calc.at:1441: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input () -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -Stack now 0 stderr: -input: Starting parse Entering state 0 Stack now 0 @@ -148470,39 +150631,14 @@ 1.1: syntax error, unexpected invalid token Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -input: -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | (- *) + (1 2) = 1 -./calc.at:1435: $PREPARSER ./calc input +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1454: cat stderr -./calc.at:1451: $PREPARSER ./calc input -531. calc.at:1441: ok +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -148512,97 +150648,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token end of input () -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Stack now 0 -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1454: $PREPARSER ./calc input -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1443: cat stderr -stderr: -stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1445: cat stderr -input: -stderr: -input: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | 1 = 2 = 3 -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 2 -./calc.at:1443: $PREPARSER ./calc input -./calc.at:1451: $PREPARSER ./calc input -stderr: -stderr: -1.3: syntax error -stderr: -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1435: cat stderr -stdout: -./calc.at:1449: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - Starting parse Entering state 0 Stack now 0 @@ -148612,182 +150666,31 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Stack now 0 8 20 4 12 -Error: popping nterm exp (3) -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () Error: popping token error () -Stack now 0 8 20 4 +Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) Entering state 8 Stack now 0 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 @@ -148798,55 +150701,13 @@ Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' () -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Stack now 0 8 20 4 12 -Error: popping nterm exp (2) -Stack now 0 8 20 4 +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token invalid token () +Error: discarding token invalid token () Error: popping token error () Stack now 0 8 20 4 Shifting token error () @@ -148867,10 +150728,10 @@ Reading a token Next token is token '=' () Reducing stack by rule 7 (line 90): - $1 = nterm exp (3333) + $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (4444) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 Next token is token '=' () @@ -148878,23 +150739,22 @@ Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (4444) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 Next token is token '\n' () @@ -148902,7 +150762,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (4444) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -148920,6 +150780,80 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () +./calc.at:1453: cat stderr +input: + | 1//2 +./calc.at:1454: $PREPARSER ./calc input +input: +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 + !- ++ +stderr: +./calc.at:1435: $PREPARSER ./calc input +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1453: $PREPARSER ./calc input +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1443: cat stderr +stderr: +stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1445: cat stderr +stdout: +stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. 
+ || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: + | 1 = 2 = 3 +./calc.at:1443: $PREPARSER ./calc input +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +input: +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +stderr: +stderr: +input: + | (1 + #) = 1111 +./calc.at:1445: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -148959,12 +150893,23 @@ Stack now 0 Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 -./calc.at:1454: cat stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error stderr: -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -148974,289 +150919,71 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 8 20 4 12 20 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 +Stack now 0 4 12 Reading a token Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' () Shifting 
token '+' () Entering state 20 -Stack now 0 8 20 4 12 20 +Stack now 0 4 12 20 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' +Next token is token error () Error: popping token '+' () -Stack now 0 8 20 4 12 -Error: popping nterm exp (3) -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 +Stack now 0 4 12 +Error: popping nterm exp (1) +Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () Error: popping token error () -Stack now 0 8 20 4 +Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '*' () -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is 
token '*' () -Shifting token '*' () -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' () -Stack now 0 8 20 4 12 -Error: popping nterm exp (2) -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Stack now 0 8 Next token is token '=' () Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (4444) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Next token is token '\n' () @@ -149264,7 +150991,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (4444) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -149282,29 +151009,8 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -input: -input: - | (- *) + (1 2) = 1 -input: -./calc.at:1454: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | (* *) + (*) + (*) -./calc.at:1449: $PREPARSER ./calc input -stderr: -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1445: "$PERL" -pi -e 'use strict; +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149314,15 +151020,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -546. calc.at:1476: testing Calculator C++ %glr-parser ... -./calc.at:1476: mv calc.y.tmp calc.y - Starting parse Entering state 0 Stack now 0 @@ -149363,8 +151082,112 @@ Cleanup: discarding lookahead token '=' (1.7: ) Stack now 0 stderr: +./calc.at:1435: cat stderr +./calc.at:1454: cat stderr +./calc.at:1453: cat stderr +./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 4 12 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Stack now 0 4 12 +Error: popping nterm exp (1) +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +input: +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149374,24 +151197,59 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +input: + | (!!) + (1 2) = 1 +./calc.at:1453: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1435: $PREPARSER ./calc input +input: + | error +./calc.at:1454: $PREPARSER ./calc input +stderr: + | 1 2 +./calc.at:1457: $PREPARSER ./calc input +stderr: +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1443: cat stderr +1.1: syntax error, unexpected invalid token +stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 ./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +input: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +1.1: syntax error, unexpected invalid token + | + | +1 +./calc.at:1443: $PREPARSER ./calc input ./calc.at:1445: cat stderr stderr: stderr: -./calc.at:1451: cat stderr -input: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1454: "$PERL" -pi -e 'use strict; +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +stderr: +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149401,10 +151259,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (!!) 
+ (1 2) = 1 -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1443: "$PERL" -pi -e 'use strict; +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149414,7 +151269,60 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +input: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149424,9 +151332,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: + | (# + 1) = 1111 ./calc.at:1454: cat stderr +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1435: cat stderr +stderr: Starting parse Entering state 0 Stack now 0 @@ -149436,112 +151346,68 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Stack now 0 8 20 4 +Error: discarding token '+' () +Error: popping token error () +Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) +Stack now 0 4 11 +Reading a token +Next token is token number (1) +Error: discarding token number (1) Error: popping token error () -Stack now 0 8 20 4 +Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Next token is token '\n' () @@ -149549,7 +151415,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -149567,19 +151433,27 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1443: cat stderr - | 1//2 -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1435: cat stderr input: +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: cat stderr ./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: + | (1 + #) = 1111 +./calc.at:1453: cat stderr +./calc.at:1435: $PREPARSER ./calc input input: stderr: -1.3: syntax error - | 1 2 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: $PREPARSER ./calc input +stderr: +./calc.at:1443: cat stderr +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -149589,112 +151463,68 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Stack now 0 4 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 Shifting token error () Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Stack now 0 8 20 4 +Error: discarding token '+' () +Error: popping token error () +Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) +Stack now 0 4 11 +Reading a token +Next token is token number (1) +Error: discarding token number (1) Error: popping token error () -Stack now 0 8 20 4 +Stack now 0 4 Shifting token error () Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token Next token is token ')' () Shifting token ')' () Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 Stack now 0 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Stack now 0 8 18 
27 Reading a token Next token is token '\n' () Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Stack now 0 8 Next token is token '\n' () @@ -149702,7 +151532,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -149720,55 +151550,35 @@ Stack now 0 6 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -input: - | (* *) + (*) + (*) - | - | +1 -./calc.at:1443: $PREPARSER ./calc input + | 1 = 2 = 3 ./calc.at:1454: $PREPARSER ./calc input -stderr: +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: $PREPARSER ./calc /dev/null input: stderr: +input: +1.7: syntax error, unexpected '=' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ + | (- *) + (1 2) = 1 + | 1//2 stderr: +./calc.at:1457: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1453: $PREPARSER ./calc input stderr: -1.3: syntax error -./calc.at:1435: $PREPARSER ./calc input -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) Stack now 0 +1.7: syntax error, unexpected '=' stderr: -./calc.at:1445: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149778,10 +151588,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; +./calc.at:1435: cat stderr +stderr: +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149791,12 +151608,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: cat stderr -stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1445: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149806,37 +151618,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1435: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1454: cat stderr stderr: -./calc.at:1476: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) Stack now 0 -stderr: -input: -./calc.at:1454: "$PERL" -pi -e 'use strict; +./calc.at:1453: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -149846,22 +151642,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (- *) + (1 2) = 1 -stdout: -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1451: cat stderr input: -./calc.at:1455: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - ./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -149872,10 +151653,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: cat stderr -./calc.at:1454: cat stderr - | 1 + 2 * 3 + !- ++ +input: +./calc.at:1445: cat stderr + | (# + 1) = 1111 + | + | +1 +./calc.at:1454: $PREPARSER ./calc input ./calc.at:1435: $PREPARSER ./calc input +./calc.at:1453: cat stderr +./calc.at:1443: cat stderr +stderr: +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: cat stderr +2.1: syntax error, unexpected '+' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +input: +1.2: syntax error: invalid character: '#' +input: + | (1 + # + 1) = 1111 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (* *) + (*) + (*) +./calc.at:1445: $PREPARSER ./calc input +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1453: $PREPARSER ./calc input +input: +stderr: +stderr: stderr: Starting parse Entering state 0 @@ -149886,25 +151693,48 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 4 2 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 4 12 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Stack now 0 4 12 +Error: popping nterm exp (1) +Stack now 0 4 Shifting token error () -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1) +Error: discarding token number (1) Error: popping token error () Stack now 0 4 Shifting token error () @@ -149923,156 +151753,423 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 
+Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () + | error +./calc.at:1457: $PREPARSER ./calc input +2.1: syntax error, unexpected '+' +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a 
token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Stack now 0 8 20 4 -Shifting token error () +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' () +Next token is token '+' (1.30: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) 
+Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting 
token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: -input: -./calc.at:1443: cat stderr -input: -input: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1455: $PREPARSER ./calc input -input: - | 1//2 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1454: $PREPARSER ./calc input - | error -./calc.at:1451: $PREPARSER ./calc input -stderr: -./calc.at:1449: $PREPARSER ./calc input -stderr: -./calc.at:1443: $PREPARSER ./calc /dev/null +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.3: syntax error +./calc.at:1435: cat stderr +stdout: stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1446: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + stderr: +./calc.at:1454: cat stderr stderr: -1.1: syntax error Starting parse Entering state 0 Stack now 0 @@ -150082,25 +152179,48 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Stack now 0 4 2 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Next token is token '+' () +Shifting token '+' () +Entering state 20 +Stack now 0 4 12 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Stack now 0 4 12 +Error: popping nterm exp (1) +Stack now 0 4 Shifting token error () -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token () +Error: discarding token invalid token () +Error: popping token error () Stack now 0 4 Shifting token error () Entering state 11 Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Error: popping token error () +Stack now 0 4 +Shifting token error () +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1) +Error: discarding token number (1) Error: popping token error () Stack now 0 4 Shifting token error () @@ -150119,129 +152239,433 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' () +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Stack now 0 8 20 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Stack now 0 8 20 4 12 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) Stack now 0 8 20 4 -Shifting token error () +Shifting token error (1.7-18: ) Entering state 11 Stack now 0 8 20 4 11 -Next token is token number (2) -Error: discarding token number (2) -Error: popping token error () +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Stack now 0 8 20 4 -Shifting token error () +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) Entering state 11 Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '=' () +Next token is token '+' (1.30: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 
21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stdout: -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input (2.1: ) +Cleanup: popping 
nterm input (1.1-2.0: ) +stderr: +input: +./calc.at:1454: $PREPARSER ./calc /dev/null +input: + | (1 + # + 1) = 1111 +./calc.at:1435: $PREPARSER ./calc input +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 stderr: +./calc.at:1446: $PREPARSER ./calc input +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.1: syntax error, unexpected end of file +./calc.at:1453: cat stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: cat stderr +./calc.at:1443: cat stderr stderr: stderr: stderr: -./calc.at:1453: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - -./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.3: syntax error -1.1: syntax error +./calc.at:1457: cat stderr Starting parse Entering state 0 Stack now 0 @@ -150250,7 +152674,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -150265,7 +152689,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -150280,16 +152704,14 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' (1.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R9 G29 R7 G8 S18 -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -150297,7 +152719,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -150306,7 +152728,6 @@ Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -150314,16 +152735,14 @@ Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.14-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) @@ -150332,16 +152751,15 @@ Stack now 0 8 Next token is token '\n' (1.14-2.0: ) Shifting token '\n' (1.14-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-13: 7) $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 @@ -150351,7 +152769,7 @@ Shifting token number (2.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 @@ -150366,7 +152784,7 @@ Shifting token number (2.5: 2) Entering state 1 Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 @@ -150386,23 +152804,21 @@ Shifting token number (2.10: 3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is 
token '=' (2.12: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 30 Stack now 0 6 8 20 29 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -150410,7 +152826,7 @@ Entering state 29 Stack now 0 6 8 20 29 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -150419,7 +152835,6 @@ Stack now 0 6 8 Next token is token '=' (2.12: ) Shifting token '=' (2.12: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -150432,23 +152847,21 @@ Shifting token number (2.15: 5) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) @@ -150457,16 +152870,15 @@ Stack now 0 6 8 Next token is token '\n' (2.16-3.0: ) Shifting token '\n' (2.16-3.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (2.1-15: -5) $2 = token '\n' (2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) @@ -150477,12 +152889,12 @@ Shifting token '\n' (3.1-4.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) @@ -150498,7 +152910,7 @@ Shifting token number (4.2: 1) Entering state 1 Stack now 0 6 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 @@ -150513,16 +152925,14 @@ Shifting token number (4.4: 2) Entering state 1 Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' (4.6: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G10 R11 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -150530,7 
+152940,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -150538,7 +152948,6 @@ Stack now 0 6 8 Next token is token '=' (4.6: ) Shifting token '=' (4.6: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -150551,23 +152960,21 @@ Shifting token number (4.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) @@ -150576,16 +152983,15 @@ Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) @@ -150606,16 +153012,14 @@ Shifting token number (5.3: 1) Entering state 1 Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R11 G12 S26 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -150623,10 +153027,9 @@ Stack now 0 6 4 12 Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -150643,16 +153046,14 @@ Shifting token number (5.6: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (5.8: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -150661,7 +153062,6 @@ Stack now 0 6 8 Next token is token '=' (5.8: ) Shifting token '=' (5.8: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -150669,16 +153069,14 @@ Shifting token number (5.10: 1) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 
(line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (5.11-6.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) @@ -150687,16 +153085,15 @@ Stack now 0 6 8 Next token is token '\n' (5.11-6.0: ) Shifting token '\n' (5.11-6.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) @@ -150707,12 +153104,12 @@ Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) @@ -150738,30 +153135,28 @@ Shifting token number (7.4: 1) Entering state 1 Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G10 R11 G10 R11 G8 S18 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -150769,7 +153164,6 @@ Stack now 0 6 8 Next token is token '=' (7.6: ) Shifting token '=' (7.6: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -150782,23 +153176,21 @@ Shifting token number (7.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) @@ -150807,16 +153199,15 @@ Stack now 0 6 8 Next token is token '\n' (7.10-8.0: ) Shifting token '\n' (7.10-8.0: ) -LAC: 
initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) @@ -150827,12 +153218,12 @@ Shifting token '\n' (8.1-9.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) @@ -150843,7 +153234,7 @@ Shifting token number (9.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 @@ -150858,16 +153249,14 @@ Shifting token number (9.5: 2) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '-' (9.7: ) -LAC: initial context established for '-' -LAC: checking lookahead '-': R8 G8 S19 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -150876,7 +153265,6 @@ Stack now 0 6 8 Next token is token '-' (9.7: ) Shifting token '-' (9.7: ) -LAC: initial context discarded due to shift Entering state 19 Stack now 0 6 8 19 Reading a token @@ -150884,16 +153272,14 @@ Shifting token number (9.9: 3) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '=' (9.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -150902,7 +153288,6 @@ Stack now 0 6 8 Next token is token '=' (9.11: ) Shifting token '=' (9.11: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -150915,23 +153300,21 @@ Shifting token number (9.14: 4) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) @@ -150940,16 +153323,15 @@ Stack now 0 6 8 Next token is token '\n' (9.15-10.0: ) Shifting token '\n' (9.15-10.0: ) -LAC: initial context discarded due to shift 
Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) @@ -150960,7 +153342,7 @@ Shifting token number (10.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 @@ -150980,7 +153362,7 @@ Shifting token number (10.6: 2) Entering state 1 Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 @@ -150995,16 +153377,14 @@ Shifting token number (10.10: 3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' (10.11: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -151013,10 +153393,9 @@ Stack now 0 6 8 19 4 12 Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -151025,9 +153404,7 @@ Stack now 0 6 8 19 28 Reading a token Next token is token '=' (10.13: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -151036,7 +153413,6 @@ Stack now 0 6 8 Next token is token '=' (10.13: ) Shifting token '=' (10.13: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -151044,16 +153420,14 @@ Shifting token number (10.15: 2) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (10.16-11.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (10.1-11: 2) $2 = token '=' (10.13: ) $3 = nterm exp (10.15: 2) @@ -151062,16 +153436,15 @@ Stack now 0 6 8 Next token is token '\n' (10.16-11.0: ) Shifting token '\n' (10.16-11.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (10.1-15: 2) $2 = token '\n' (10.16-11.0: ) -> $$ = nterm line (10.1-11.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-10.0: ) $2 = nterm line (10.1-11.0: ) -> $$ = nterm input (1.1-11.0: ) @@ -151082,12 +153455,12 @@ Shifting token '\n' (11.1-12.0: ) Entering 
state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (11.1-12.0: ) -> $$ = nterm line (11.1-12.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-11.0: ) $2 = nterm line (11.1-12.0: ) -> $$ = nterm input (1.1-12.0: ) @@ -151098,7 +153471,7 @@ Shifting token number (12.1: 2) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 @@ -151113,7 +153486,7 @@ Shifting token number (12.3: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 @@ -151128,16 +153501,14 @@ Shifting token number (12.5: 3) Entering state 1 Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' (12.7: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G32 R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -151145,7 +153516,7 @@ Entering state 32 Stack now 0 6 8 23 32 Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -151154,7 +153525,6 @@ Stack now 0 6 8 Next token is token '=' (12.7: ) Shifting token '=' (12.7: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -151162,16 +153532,14 @@ Shifting token number (12.9-11: 256) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (12.12-13.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (12.1-5: 256) $2 = token '=' (12.7: ) $3 = nterm exp (12.9-11: 256) @@ -151180,16 +153548,15 @@ Stack now 0 6 8 Next token is token '\n' (12.12-13.0: ) Shifting token '\n' (12.12-13.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (12.1-11: 256) $2 = token '\n' (12.12-13.0: ) -> $$ = nterm line (12.1-13.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-12.0: ) $2 = nterm line (12.1-13.0: ) -> $$ = nterm input (1.1-13.0: ) @@ -151205,7 +153572,7 @@ Shifting token number (13.2: 2) Entering state 1 Stack now 0 6 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 @@ -151220,16 +153587,14 @@ Shifting token number (13.4: 2) Entering state 1 Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token 
Next token is token ')' (13.5: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R12 G12 S26 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -151238,10 +153603,9 @@ Stack now 0 6 4 12 Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -151258,16 +153622,14 @@ Shifting token number (13.7: 3) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (13.9: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -151276,7 +153638,6 @@ Stack now 0 6 8 Next token is token '=' (13.9: ) Shifting token '=' (13.9: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -151284,16 +153645,14 @@ Shifting token number (13.11-12: 64) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (13.13-14.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (13.1-7: 64) $2 = token '=' (13.9: ) $3 = nterm exp (13.11-12: 64) @@ -151302,41 +153661,39 @@ Stack now 0 6 8 Next token is token '\n' (13.13-14.0: ) Shifting token '\n' (13.13-14.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (13.1-12: 64) $2 = token '\n' (13.13-14.0: ) -> $$ = nterm line (13.1-14.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-13.0: ) $2 = nterm line (13.1-14.0: ) -> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of file (14.1: ) -Shifting token end of file (14.1: ) +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of file (14.1: ) +Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 +1.6: syntax error: invalid character: '#' +1.1: syntax error, unexpected end of file + | 1 + 2 * 3 + !+ ++ +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr input: +stdout: ./calc.at:1435: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -151347,41 +153704,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1451: "$PERL" -pi -e 'use strict; +input: +input: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1455: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -151391,10 +153729,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1454: $PREPARSER ./calc input -stderr: -./calc.at:1435: cat stderr + | 1 = 2 = 3 + | (!!) + (1 2) = 1 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1445: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -151404,7 +153743,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -151419,7 +153758,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -151434,16 +153773,14 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' (1.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R9 G29 R7 G8 S18 -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -151451,7 +153788,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -151460,7 +153797,6 @@ Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -151468,16 +153804,14 @@ Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (1.13: 7) 
-> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.14-2.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) @@ -151486,16 +153820,15 @@ Stack now 0 8 Next token is token '\n' (1.14-2.0: ) Shifting token '\n' (1.14-2.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (1.1-13: 7) $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 82): +Reducing stack by rule 1 (line 69): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 @@ -151505,7 +153838,7 @@ Shifting token number (2.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.1: 1) -> $$ = nterm exp (2.1: 1) Entering state 8 @@ -151520,7 +153853,7 @@ Shifting token number (2.5: 2) Entering state 1 Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 @@ -151540,23 +153873,21 @@ Shifting token number (2.10: 3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' (2.12: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 30 Stack now 0 6 8 20 29 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 105): +Reducing stack by rule 9 (line 92): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -151564,7 +153895,7 @@ Entering state 29 Stack now 0 6 8 20 29 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 103): +Reducing stack by rule 7 (line 90): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -151573,7 +153904,6 @@ Stack now 0 6 8 Next token is token '=' (2.12: ) Shifting token '=' (2.12: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -151586,23 +153916,21 @@ Shifting token number (2.15: 5) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) @@ -151611,16 +153939,15 @@ Stack now 0 6 8 Next token is token '\n' (2.16-3.0: ) Shifting token '\n' (2.16-3.0: 
) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (2.1-15: -5) $2 = token '\n' (2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) @@ -151631,12 +153958,12 @@ Shifting token '\n' (3.1-4.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) @@ -151652,7 +153979,7 @@ Shifting token number (4.2: 1) Entering state 1 Stack now 0 6 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) Entering state 10 @@ -151667,16 +153994,14 @@ Shifting token number (4.4: 2) Entering state 1 Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' (4.6: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G10 R11 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -151684,7 +154009,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -151692,7 +154017,6 @@ Stack now 0 6 8 Next token is token '=' (4.6: ) Shifting token '=' (4.6: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -151705,23 +154029,21 @@ Shifting token number (4.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) @@ -151730,16 +154052,15 @@ Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) @@ -151760,16 +154081,14 @@ Shifting token number (5.3: 1) Entering state 1 Stack 
now 0 6 4 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R11 G12 S26 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -151777,10 +154096,9 @@ Stack now 0 6 4 12 Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -151797,16 +154115,14 @@ Shifting token number (5.6: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (5.8: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -151815,7 +154131,6 @@ Stack now 0 6 8 Next token is token '=' (5.8: ) Shifting token '=' (5.8: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -151823,16 +154138,14 @@ Shifting token number (5.10: 1) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (5.11-6.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) @@ -151841,16 +154154,15 @@ Stack now 0 6 8 Next token is token '\n' (5.11-6.0: ) Shifting token '\n' (5.11-6.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) @@ -151861,12 +154173,12 @@ Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) @@ -151892,30 +154204,28 @@ Shifting token number (7.4: 1) Entering state 1 Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R11 G10 R11 G10 R11 G8 S18 -Reducing stack by rule 11 (line 115): +Reducing 
stack by rule 11 (line 102): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -151923,7 +154233,6 @@ Stack now 0 6 8 Next token is token '=' (7.6: ) Shifting token '=' (7.6: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -151936,23 +154245,21 @@ Shifting token number (7.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) @@ -151961,16 +154268,15 @@ Stack now 0 6 8 Next token is token '\n' (7.10-8.0: ) Shifting token '\n' (7.10-8.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) @@ -151981,12 +154287,12 @@ Shifting token '\n' (8.1-9.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) @@ -151997,7 +154303,7 @@ Shifting token number (9.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 @@ -152012,16 +154318,14 @@ Shifting token number (9.5: 2) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '-' (9.7: ) -LAC: initial context established for '-' -LAC: checking lookahead '-': R8 G8 S19 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -152030,7 +154334,6 @@ Stack now 0 6 8 Next token is token '-' (9.7: ) Shifting token '-' (9.7: ) -LAC: initial context discarded due to shift Entering state 19 Stack now 0 6 8 19 Reading a token @@ -152038,16 +154341,14 @@ Shifting 
token number (9.9: 3) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '=' (9.11: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -152056,7 +154357,6 @@ Stack now 0 6 8 Next token is token '=' (9.11: ) Shifting token '=' (9.11: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -152069,23 +154369,21 @@ Shifting token number (9.14: 4) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R11 G27 R6 G8 S24 -Reducing stack by rule 11 (line 115): +Reducing stack by rule 11 (line 102): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) @@ -152094,16 +154392,15 @@ Stack now 0 6 8 Next token is token '\n' (9.15-10.0: ) Shifting token '\n' (9.15-10.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) @@ -152114,7 +154411,7 @@ Shifting token number (10.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 @@ -152134,7 +154431,7 @@ Shifting token number (10.6: 2) Entering state 1 Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 @@ -152149,16 +154446,14 @@ Shifting token number (10.10: 3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' (10.11: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R8 G12 S26 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -152167,10 +154462,9 @@ Stack now 0 6 8 19 4 12 Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -152179,9 +154473,7 @@ Stack now 0 6 8 19 28 Reading a 
token Next token is token '=' (10.13: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R8 G8 S18 -Reducing stack by rule 8 (line 104): +Reducing stack by rule 8 (line 91): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -152190,7 +154482,6 @@ Stack now 0 6 8 Next token is token '=' (10.13: ) Shifting token '=' (10.13: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -152198,16 +154489,14 @@ Shifting token number (10.15: 2) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (10.16-11.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (10.1-11: 2) $2 = token '=' (10.13: ) $3 = nterm exp (10.15: 2) @@ -152216,16 +154505,15 @@ Stack now 0 6 8 Next token is token '\n' (10.16-11.0: ) Shifting token '\n' (10.16-11.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (10.1-15: 2) $2 = token '\n' (10.16-11.0: ) -> $$ = nterm line (10.1-11.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-10.0: ) $2 = nterm line (10.1-11.0: ) -> $$ = nterm input (1.1-11.0: ) @@ -152236,12 +154524,12 @@ Shifting token '\n' (11.1-12.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 87): +Reducing stack by rule 3 (line 74): $1 = token '\n' (11.1-12.0: ) -> $$ = nterm line (11.1-12.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-11.0: ) $2 = nterm line (11.1-12.0: ) -> $$ = nterm input (1.1-12.0: ) @@ -152252,7 +154540,7 @@ Shifting token number (12.1: 2) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 @@ -152267,7 +154555,7 @@ Shifting token number (12.3: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 @@ -152282,16 +154570,14 @@ Shifting token number (12.5: 3) Entering state 1 Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' (12.7: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G32 R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -152299,7 +154585,7 @@ Entering state 32 Stack now 0 6 8 23 32 Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -152308,7 +154594,6 @@ Stack now 0 6 8 Next token is token '=' (12.7: ) Shifting token '=' (12.7: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token 
@@ -152316,16 +154601,14 @@ Shifting token number (12.9-11: 256) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (12.12-13.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = nterm exp (12.1-5: 256) $2 = token '=' (12.7: ) $3 = nterm exp (12.9-11: 256) @@ -152334,16 +154617,15 @@ Stack now 0 6 8 Next token is token '\n' (12.12-13.0: ) Shifting token '\n' (12.12-13.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (12.1-11: 256) $2 = token '\n' (12.12-13.0: ) -> $$ = nterm line (12.1-13.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 83): +Reducing stack by rule 2 (line 70): $1 = nterm input (1.1-12.0: ) $2 = nterm line (12.1-13.0: ) -> $$ = nterm input (1.1-13.0: ) @@ -152359,7 +154641,7 @@ Shifting token number (13.2: 2) Entering state 1 Stack now 0 6 4 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.2: 2) -> $$ = nterm exp (13.2: 2) Entering state 12 @@ -152374,16 +154656,14 @@ Shifting token number (13.4: 2) Entering state 1 Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.4: 2) -> $$ = nterm exp (13.4: 2) Entering state 32 Stack now 0 6 4 12 23 32 Reading a token Next token is token ')' (13.5: ) -LAC: initial context established for ')' -LAC: checking lookahead ')': R12 G12 S26 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.2: 2) $2 = token '^' (13.3: ) $3 = nterm exp (13.4: 2) @@ -152392,10 +154672,9 @@ Stack now 0 6 4 12 Next token is token ')' (13.5: ) Shifting token ')' (13.5: ) -LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 117): +Reducing stack by rule 13 (line 104): $1 = token '(' (13.1: ) $2 = nterm exp (13.2-4: 4) $3 = token ')' (13.5: ) @@ -152412,16 +154691,14 @@ Shifting token number (13.7: 3) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.7: 3) -> $$ = nterm exp (13.7: 3) Entering state 32 Stack now 0 6 8 23 32 Reading a token Next token is token '=' (13.9: ) -LAC: initial context established for '=' -LAC: checking lookahead '=': R12 G8 S18 -Reducing stack by rule 12 (line 116): +Reducing stack by rule 12 (line 103): $1 = nterm exp (13.1-5: 4) $2 = token '^' (13.6: ) $3 = nterm exp (13.7: 3) @@ -152430,7 +154707,6 @@ Stack now 0 6 8 Next token is token '=' (13.9: ) Shifting token '=' (13.9: ) -LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -152438,16 +154714,14 @@ Shifting token number (13.11-12: 64) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 92): +Reducing stack by rule 5 (line 79): $1 = token number (13.11-12: 64) -> $$ = nterm exp (13.11-12: 64) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (13.13-14.0: ) -LAC: initial context established for '\n' -LAC: checking lookahead '\n': R6 G8 S24 -Reducing stack by rule 6 (line 93): +Reducing stack by rule 6 (line 80): $1 = 
nterm exp (13.1-7: 64) $2 = token '=' (13.9: ) $3 = nterm exp (13.11-12: 64) @@ -152456,1585 +154730,180 @@ Stack now 0 6 8 Next token is token '\n' (13.13-14.0: ) Shifting token '\n' (13.13-14.0: ) -LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 88): +Reducing stack by rule 4 (line 75): $1 = nterm exp (13.1-12: 64) $2 = token '\n' (13.13-14.0: ) -> $$ = nterm line (13.1-14.0: ) Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 83): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of file (14.1: ) -Shifting token end of file (14.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of file (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1445: cat stderr -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (#) + (#) = 2222 -stderr: -./calc.at:1451: cat stderr -input: -./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1435: $PREPARSER ./calc input - | (* *) + (*) + (*) -stderr: -./calc.at:1445: $PREPARSER ./calc input - | 1 2 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1443: cat stderr -./calc.at:1449: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: -stderr: - | 1 = 2 = 3 -stderr: -input: -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: - | 1 2 -stderr: -./calc.at:1453: $PREPARSER ./calc input -stderr: -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | error -./calc.at:1449: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' () -Error: discarding token '*' () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token number (1.3: 2) -LAC: initial context established for number -LAC: checking lookahead number: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: Err -LAC: checking lookahead '=': S18 -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -stderr: -./calc.at:1443: $PREPARSER ./calc input -1.7: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.3: syntax error, unexpected number -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: cat stderr -input: -stderr: -1.1: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | (#) + (#) = 2222 -./calc.at:1454: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.3: syntax error, unexpected number -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.7: syntax error -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: cat stderr -input: -stderr: - | (1 + #) = 1111 -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -./calc.at:1454: cat stderr -stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, 
unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 
-Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1445: cat stderr -input: -stderr: - | 1//2 -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' -input: -./calc.at:1455: $PREPARSER ./calc input - | (1 + #) = 1111 -./calc.at:1454: $PREPARSER ./calc input -input: -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1451: cat stderr -./calc.at:1453: cat stderr -stderr: -./calc.at:1445: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -LAC: initial context established for '/' -LAC: checking lookahead '/': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' () -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1443: cat stderr -stderr: -input: -1.6: syntax error: invalid character: '#' -input: - | 1//2 -./calc.at:1435: cat stderr -./calc.at:1453: $PREPARSER ./calc input - | - | +1 -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -LAC: initial context established for '/' -LAC: checking lookahead '/': Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': Err -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: -./calc.at:1449: cat stderr +Stack now 0 6 17 +Reducing stack by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (14.1: ) +Shifting token end of input (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1454: cat stderr +./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr stderr: -2.1: syntax error stderr: -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -input: +./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1435: cat stderr stderr: input: -./calc.at:1457: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 +Stack now 0 4 12 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 1 +Stack now 0 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 29 +Stack now 0 4 12 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token ')' () +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 29 21 1 +Stack now 0 8 22 4 1 Reducing stack by 
rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Stack now 0 8 20 29 21 30 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 22 4 12 Reading a token -Next token is token '+' () -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' () -Reducing stack by rule 7 (line 90): +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' () +Reducing stack by rule 8 (line 91): $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' () +Reducing stack by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Stack now 0 8 20 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) - | (!!) + (1 2) = 1 -input: -./calc.at:1443: $PREPARSER ./calc input -./calc.at:1445: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | (# + 1) = 1111 -./calc.at:1435: $PREPARSER ./calc input -stderr: - | 1 = 2 = 3 -./calc.at:1449: $PREPARSER ./calc input -2.1: syntax error -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: cat stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -stderr: -./calc.at:1457: $PREPARSER ./calc input -1.7: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.2: syntax error: invalid character: '#' -stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Stack now 0 @@ -154175,145 +155044,210 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: $PREPARSER ./calc input + | 1 2 +./calc.at:1446: $PREPARSER ./calc input input: - | 1 + 2 * 3 + !- ++ -./calc.at:1455: cat stderr input: -./calc.at:1445: $PREPARSER ./calc input -1.7: syntax error -./calc.at:1453: cat stderr - | (# + 1) = 1111 +input: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1454: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1453: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1435: $PREPARSER ./calc input stderr: stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +stderr: +stderr: +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) Starting parse Entering state 0 Stack now 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Stack now 0 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Stack now 0 8 +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 -Stack now 0 8 20 +Stack now 0 4 12 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Stack now 0 8 20 1 +Stack now 0 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Stack now 0 8 20 29 21 30 +Stack now 0 4 12 20 29 Reading a token -Next token is token '+' () -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' () +Next token is token ')' () Reducing stack by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 Reading a token -Next token is token '!' () -Shifting token '!' 
() -Entering state 5 -Stack now 0 8 20 5 +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Stack now 0 8 22 4 12 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' () +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' () +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1) $2 = token '-' () -Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | error -./calc.at:1453: $PREPARSER ./calc input - | error -./calc.at:1455: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' () +Reducing stack by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input () +Shifting token end of input () +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 stderr: -1.1: syntax error, unexpected invalid token -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -154454,1571 +155388,1125 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1435: cat stderr -stderr: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1451: cat stderr -input: stderr: -1.1: syntax error, unexpected invalid token stderr: - | 1 2 -./calc.at:1457: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Stack now 0 8 20 29 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack by rule 5 (line 92): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '+' () -Reducing stack by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 
= nterm exp (3) --> $$ = nterm exp (6) +Next token is token '=' (1.11: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R9 G29 R7 G8 S18 +Reducing stack by rule 9 (line 105): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Stack now 0 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Stack now 0 8 20 -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -LAC: initial context established for invalid token -LAC: checking lookahead invalid token: Err -LAC: checking lookahead end of file: Err -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -LAC: checking lookahead ')': Err -LAC: checking lookahead '!': S5 -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -stderr: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1451: $PREPARSER ./calc /dev/null -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1449: cat stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1443: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: cat stderr -./calc.at:1453: cat stderr -1.1: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1445: cat stderr -./calc.at:1454: cat stderr -input: -input: -stderr: -stderr: -./calc.at:1457: cat stderr -1.6: syntax error: invalid character: '#' - | 1 = 2 = 3 -./calc.at:1453: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1455: $PREPARSER ./calc input -1.1: syntax error -input: -stderr: -stderr: -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (1.13: 7) +Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '=' (1.7: ) -LAC: checking lookahead '=': Err -LAC: checking lookahead end of file: R6 G8 Err -LAC: checking lookahead number: R6 G8 Err -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -LAC: checking lookahead NEG: R6 G8 Err -LAC: checking lookahead '^': S23 -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) +Next token is token '\n' (1.14-2.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1443: $PREPARSER ./calc input -input: - | (1 + # + 1) = 1111 -./calc.at:1454: $PREPARSER ./calc input -1.7: syntax error, unexpected '=' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
-./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -2.1: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -input: -./calc.at:1445: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - | 1//2 -./calc.at:1457: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token number (2.1: 1) +Shifting token number (2.1: 1) Entering state 1 -Stack now 0 1 +Stack now 0 6 1 Reducing stack by rule 5 (line 92): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Stack now 0 6 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token number (2.5: 2) +Shifting token number (2.5: 2) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 6 8 20 1 Reducing stack by rule 5 (line 92): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Stack now 0 6 8 20 29 Reading a token -Next token is token '=' (1.7: ) -LAC: checking lookahead '=': Err -LAC: checking lookahead end of file: R6 G8 Err -LAC: checking lookahead number: R6 G8 Err -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S19 -LAC: checking lookahead '+': S20 -LAC: checking lookahead '*': S21 -LAC: checking lookahead '/': S22 -LAC: checking lookahead NEG: R6 G8 Err -LAC: checking lookahead '^': S23 -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Stack now 0 6 8 20 29 21 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Stack now 0 6 8 20 29 21 2 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) +Next token is token number (2.10: 3) +Shifting token number (2.10: 3) +Entering state 1 +Stack now 0 6 8 20 29 21 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Stack now 0 6 8 20 29 21 2 10 +Reading a token +Next token is token '=' (2.12: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 +Reducing stack by rule 11 (line 115): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Stack now 0 6 8 20 29 21 30 +Next token is token '=' (2.12: ) +Reducing stack by rule 9 (line 105): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Stack now 0 6 8 20 29 +Next token is token '=' (2.12: ) +Reducing stack by rule 7 (line 103): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 -Stack now 0 4 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (2.15: 5) +Shifting token number (2.15: 5) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '\n' (2.16-3.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (4.2: 1) +Shifting token number (4.2: 1) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Stack now 0 8 20 4 12 +Stack now 0 6 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Stack now 0 6 2 10 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack 
now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Stack now 0 6 2 10 23 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (4.4: 2) +Shifting token number (4.4: 2) +Entering state 1 +Stack now 0 6 2 10 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Stack now 0 6 2 10 23 32 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' (4.6: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G10 R11 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (4.6: ) +Reducing stack by rule 11 (line 115): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Stack now 0 6 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +LAC: initial context discarded due to shift Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (4.9: 1) +Shifting token number (4.9: 1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Next token is token '\n' (4.10-5.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +LAC: initial context discarded due to shift Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ 
= nterm input (1.1-2.0: ) +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Stack now 0 6 4 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Stack now 0 6 4 2 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 +Next token is token number (5.3: 1) +Shifting token number (5.3: 1) +Entering state 1 +Stack now 0 6 4 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Stack now 0 6 4 2 10 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 +Next token is token ')' (5.4: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R11 G12 S26 +Reducing stack by rule 11 (line 115): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Stack now 0 6 8 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (5.6: 2) +Shifting token number (5.6: 2) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Stack 
now 0 6 8 23 32 Reading a token -Next token is token '=' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (5.8: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 6 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +LAC: initial context discarded due to shift Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (5.10: 1) +Shifting token number (5.10: 1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) +Next token is token '\n' (5.11-6.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 6 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +LAC: initial context discarded due to shift Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1435: cat stderr -1.7: syntax error, unexpected '=' -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Stack now 0 8 +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 8 20 +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Stack now 0 6 2 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 20 4 +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Stack now 0 6 2 2 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 8 20 4 -Shifting token error () -Entering state 11 -Stack now 0 8 20 4 11 +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Stack now 0 6 2 2 2 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (7.4: 1) +Shifting token number (7.4: 1) +Entering state 1 +Stack now 0 6 2 2 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Stack now 0 6 2 2 2 10 Reading a token -Next token is token '=' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '=' (7.6: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R11 G10 R11 G10 R11 G8 S18 +Reducing stack by rule 11 (line 115): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Stack now 0 6 2 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 115): 
+ $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Stack now 0 6 2 10 +Next token is token '=' (7.6: ) +Reducing stack by rule 11 (line 115): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Stack now 0 8 -Next token is token '=' () -Shifting token '=' () +Stack now 0 6 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +LAC: initial context discarded due to shift Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Stack now 0 6 8 18 2 +Reading a token +Next token is token number (7.9: 1) +Shifting token number (7.9: 1) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) -Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) +Next token is token '\n' (7.10-8.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 6 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +LAC: initial context discarded due to shift Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: -./calc.at:1453: cat stderr -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1455: cat stderr -Starting parse -Entering state 0 -Stack now 0 +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Stack now 0 6 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (9.1: 1) +Shifting token number (9.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.5: 2) +Shifting token number (9.5: 2) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '-' (9.7: ) +LAC: initial context established for '-' +LAC: checking lookahead '-': R8 G8 S19 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Stack now 0 6 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +LAC: initial context discarded due to shift +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token number (9.9: 3) +Shifting token number (9.9: 3) +Entering state 1 +Stack now 0 6 8 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (9.11: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 -Stack now 0 4 2 +Stack now 0 6 8 18 2 Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (9.14: 4) +Shifting token number (9.14: 4) +Entering state 1 +Stack now 0 6 8 18 2 1 +Reducing stack by rule 5 (line 92): + $1 = token number (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Stack now 0 6 8 18 2 10 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) +Next token is token '\n' (9.15-10.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Stack now 0 6 8 18 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack by rule 6 (line 93): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Stack now 0 6 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token number (10.1: 1) +Shifting token number (10.1: 1) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Stack now 0 6 8 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 -Stack now 0 8 20 4 +Stack now 0 6 8 19 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (10.6: 2) +Shifting token number (10.6: 2) Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) +Stack now 0 6 8 19 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 -Stack now 0 8 20 4 12 +Stack now 0 6 8 19 4 12 Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 
11 -Stack now 0 8 20 4 11 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Stack now 0 6 8 19 4 12 19 Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 +Next token is token number (10.10: 3) +Shifting token number (10.10: 3) +Entering state 1 +Stack now 0 6 8 19 4 12 19 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Stack now 0 6 8 19 4 12 19 28 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token ')' (10.11: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R8 G12 S26 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Stack now 0 6 8 19 4 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 6 8 19 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Stack now 0 6 8 19 28 +Reading a token +Next token is token '=' (10.13: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Stack now 0 6 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +LAC: initial context discarded due to shift Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (10.15: 2) +Shifting token number (10.15: 2) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Next token is token '\n' (10.16-11.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Stack now 0 6 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +LAC: initial context discarded due to shift Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm 
line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | (1 + 1) / (1 - 1) -./calc.at:1451: cat stderr -./calc.at:1435: $PREPARSER ./calc input -./calc.at:1454: cat stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.11-17: error: null divisor -./calc.at:1435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -input: -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | - | +1 -input: -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1454: $PREPARSER ./calc input -1.11-17: error: null divisor - | - | +1 -./calc.at:1455: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1449: cat stderr -stderr: -./calc.at:1457: cat stderr -stderr: -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1445: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 87): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (2.1: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -LAC: checking lookahead end of file: S16 -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -stderr: -2.1: syntax error, unexpected '+' -./calc.at:1453: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: cat stderr -1.11-17: error: null divisor -stderr: -./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./calc.at:1435: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -input: -./calc.at:1446: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1449: $PREPARSER ./calc /dev/null -stderr: -stderr: -stderr: -input: -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -2.1: syntax error, unexpected '+' -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) Entering state 3 -Stack now 0 3 +Stack now 0 6 3 Reducing stack by rule 3 (line 87): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 82): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (2.1: ) -LAC: initial context established for '+' -LAC: checking lookahead '+': Err -LAC: checking lookahead end of file: S16 -LAC: checking lookahead number: S1 -LAC: checking lookahead '=': Err -LAC: checking lookahead '-': S2 -LAC: checking lookahead '+': Err -LAC: checking lookahead '*': Err -LAC: checking lookahead '/': Err -LAC: checking lookahead NEG: Err -LAC: checking lookahead '^': Err -LAC: checking lookahead '\n': S3 -LAC: checking lookahead '(': S4 -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 - | (1 + #) = 1111 -1.11-17: error: null divisor -./calc.at:1445: $PREPARSER ./calc input -stderr: - | error -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1435: cat stderr -stderr: -input: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token number (12.1: 2) +Shifting token number (12.1: 2) +Entering state 1 +Stack now 0 6 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Stack now 0 6 8 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Stack now 0 6 8 23 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (12.3: 2) +Shifting token number (12.3: 2) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack 
now 0 4 12 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Stack now 0 6 8 23 32 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 4 12 20 +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Stack now 0 6 8 23 32 23 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Stack now 0 4 12 -Error: popping nterm exp (1) -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token number (12.5: 3) +Shifting token number (12.5: 3) +Entering state 1 +Stack now 0 6 8 23 32 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Stack now 0 6 8 23 32 23 32 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '=' (12.7: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G32 R12 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Stack now 0 6 8 23 32 +Next token is token '=' (12.7: ) +Reducing stack by rule 12 (line 116): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' () -Shifting token '=' () +Stack now 0 6 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +LAC: initial context discarded due to shift Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (12.9-11: 256) +Shifting token number (12.9-11: 256) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token '\n' (12.12-13.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 6 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +LAC: initial context discarded due to shift Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 6 8 24 +Reducing stack by 
rule 4 (line 88): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -1.1: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1443: $PREPARSER ./calc input -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -527. calc.at:1435: stderr: - ok -./calc.at:1454: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1446: $PREPARSER ./calc input -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1453: cat stderr -./calc.at:1454: cat stderr -1.1: syntax error -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 -Stack now 0 4 +Stack now 0 6 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 -Stack now 0 4 12 +Stack now 0 6 4 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 4 12 20 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Stack now 0 4 12 -Error: popping nterm exp (1) -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.4: 2) +-> $$ = 
nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token ')' (13.5: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R12 G12 S26 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 -Stack now 0 8 +Stack now 0 6 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +LAC: initial context discarded due to shift Entering state 18 -Stack now 0 8 18 +Stack now 0 6 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 -Stack now 0 8 18 27 +Stack now 0 6 8 18 27 Reading a token -Next token is token '\n' () -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token '\n' (13.13-14.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +LAC: initial context discarded due to shift Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-13.0: 
) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of file (14.1: ) +Shifting token end of file (14.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.1-46: error: 4444 != 1 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1453: $PREPARSER ./calc /dev/null -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1449: "$PERL" -pi -e 'use strict; +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +stderr: +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -156028,11 +156516,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -1.1: syntax error, unexpected end of input -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -539. calc.at:1454: ok Starting parse Entering state 0 Stack now 0 @@ -156041,7 +156524,7 @@ Shifting token number (1.1: 1) Entering state 1 Stack now 0 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.1: 1) -> $$ = nterm exp (1.1: 1) Entering state 8 @@ -156056,7 +156539,7 @@ Shifting token number (1.5: 2) Entering state 1 Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) Entering state 29 @@ -156071,14 +156554,16 @@ Shifting token number (1.9: 3) Entering state 1 Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.9: 3) -> $$ = nterm exp (1.9: 3) Entering state 30 Stack now 0 8 20 29 21 30 Reading a token Next token is token '=' (1.11: ) -Reducing stack by rule 9 (line 92): +LAC: initial context established for '=' +LAC: checking lookahead '=': R9 G29 R7 G8 S18 +Reducing stack by rule 9 (line 105): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) @@ -156086,7 +156571,7 @@ Entering state 29 Stack now 0 8 20 29 Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): +Reducing stack by rule 7 (line 103): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) @@ -156095,6 +156580,7 @@ Stack now 0 8 Next token is token '=' (1.11: ) Shifting token '=' (1.11: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 8 18 Reading a token @@ -156102,14 +156588,16 @@ Shifting token number (1.13: 7) Entering state 1 Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (1.13: 7) -> $$ = nterm exp (1.13: 7) Entering state 27 Stack now 0 8 18 27 Reading a token Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (1.1-9: 7) $2 = token '=' (1.11: ) $3 = nterm exp (1.13: 7) @@ -156118,15 +156606,16 @@ Stack now 0 8 Next token is token '\n' (1.14-2.0: ) Shifting token '\n' (1.14-2.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (1.1-13: 7) $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 -Reducing stack by rule 1 (line 69): +Reducing stack by rule 1 (line 82): $1 = nterm line (1.1-2.0: ) -> $$ = nterm input (1.1-2.0: ) Entering state 6 @@ -156136,7 +156625,7 @@ Shifting token number (2.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.1: 1) -> $$ = 
nterm exp (2.1: 1) Entering state 8 @@ -156151,7 +156640,7 @@ Shifting token number (2.5: 2) Entering state 1 Stack now 0 6 8 20 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.5: 2) -> $$ = nterm exp (2.5: 2) Entering state 29 @@ -156171,21 +156660,23 @@ Shifting token number (2.10: 3) Entering state 1 Stack now 0 6 8 20 29 21 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.10: 3) -> $$ = nterm exp (2.10: 3) Entering state 10 Stack now 0 6 8 20 29 21 2 10 Reading a token Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '=' +LAC: checking lookahead '=': R11 G30 R9 G29 R7 G8 S18 +Reducing stack by rule 11 (line 115): $1 = token '-' (2.9: ) $2 = nterm exp (2.10: 3) -> $$ = nterm exp (2.9-10: -3) Entering state 30 Stack now 0 6 8 20 29 21 30 Next token is token '=' (2.12: ) -Reducing stack by rule 9 (line 92): +Reducing stack by rule 9 (line 105): $1 = nterm exp (2.5: 2) $2 = token '*' (2.7: ) $3 = nterm exp (2.9-10: -3) @@ -156193,7 +156684,7 @@ Entering state 29 Stack now 0 6 8 20 29 Next token is token '=' (2.12: ) -Reducing stack by rule 7 (line 90): +Reducing stack by rule 7 (line 103): $1 = nterm exp (2.1: 1) $2 = token '+' (2.3: ) $3 = nterm exp (2.5-10: -6) @@ -156202,6 +156693,7 @@ Stack now 0 6 8 Next token is token '=' (2.12: ) Shifting token '=' (2.12: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -156214,21 +156706,23 @@ Shifting token number (2.15: 5) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (2.15: 5) -> $$ = nterm exp (2.15: 5) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (2.14: ) $2 = nterm exp (2.15: 5) -> $$ = nterm exp (2.14-15: -5) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (2.1-10: -5) $2 = token '=' (2.12: ) $3 = nterm exp (2.14-15: -5) @@ -156237,15 +156731,16 @@ Stack now 0 6 8 Next token is token '\n' (2.16-3.0: ) Shifting token '\n' (2.16-3.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (2.1-15: -5) $2 = token '\n' (2.16-3.0: ) -> $$ = nterm line (2.1-3.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-2.0: ) $2 = nterm line (2.1-3.0: ) -> $$ = nterm input (1.1-3.0: ) @@ -156256,12 +156751,12 @@ Shifting token '\n' (3.1-4.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (3.1-4.0: ) -> $$ = nterm line (3.1-4.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-3.0: ) $2 = nterm line (3.1-4.0: ) -> $$ = nterm input (1.1-4.0: ) @@ -156277,7 +156772,7 @@ Shifting token number (4.2: 1) Entering state 1 Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (4.2: 1) -> $$ = nterm exp (4.2: 1) 
Entering state 10 @@ -156292,14 +156787,16 @@ Shifting token number (4.4: 2) Entering state 1 Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (4.4: 2) -> $$ = nterm exp (4.4: 2) Entering state 32 Stack now 0 6 2 10 23 32 Reading a token Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G10 R11 G8 S18 +Reducing stack by rule 12 (line 116): $1 = nterm exp (4.2: 1) $2 = token '^' (4.3: ) $3 = nterm exp (4.4: 2) @@ -156307,7 +156804,7 @@ Entering state 10 Stack now 0 6 2 10 Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): +Reducing stack by rule 11 (line 115): $1 = token '-' (4.1: ) $2 = nterm exp (4.2-4: 1) -> $$ = nterm exp (4.1-4: -1) @@ -156315,6 +156812,7 @@ Stack now 0 6 8 Next token is token '=' (4.6: ) Shifting token '=' (4.6: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -156327,21 +156825,23 @@ Shifting token number (4.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (4.9: 1) -> $$ = nterm exp (4.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (4.8: ) $2 = nterm exp (4.9: 1) -> $$ = nterm exp (4.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (4.1-4: -1) $2 = token '=' (4.6: ) $3 = nterm exp (4.8-9: -1) @@ -156350,15 +156850,16 @@ Stack now 0 6 8 Next token is token '\n' (4.10-5.0: ) Shifting token '\n' (4.10-5.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (4.1-9: -1) $2 = token '\n' (4.10-5.0: ) -> $$ = nterm line (4.1-5.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-4.0: ) $2 = nterm line (4.1-5.0: ) -> $$ = nterm input (1.1-5.0: ) @@ -156379,14 +156880,16 @@ Shifting token number (5.3: 1) Entering state 1 Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (5.3: 1) -> $$ = nterm exp (5.3: 1) Entering state 10 Stack now 0 6 4 2 10 Reading a token Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for ')' +LAC: checking lookahead ')': R11 G12 S26 +Reducing stack by rule 11 (line 115): $1 = token '-' (5.2: ) $2 = nterm exp (5.3: 1) -> $$ = nterm exp (5.2-3: -1) @@ -156394,9 +156897,10 @@ Stack now 0 6 4 12 Next token is token ')' (5.4: ) Shifting token ')' (5.4: ) +LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack by rule 13 (line 117): $1 = token '(' (5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) @@ -156413,14 +156917,16 @@ Shifting token number (5.6: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (5.6: 2) -> $$ = nterm exp (5.6: 2) Entering state 32 Stack now 0 6 8 23 32 Reading a token 
Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): $1 = nterm exp (5.1-4: -1) $2 = token '^' (5.5: ) $3 = nterm exp (5.6: 2) @@ -156429,6 +156935,7 @@ Stack now 0 6 8 Next token is token '=' (5.8: ) Shifting token '=' (5.8: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -156436,14 +156943,16 @@ Shifting token number (5.10: 1) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (5.10: 1) -> $$ = nterm exp (5.10: 1) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (5.1-6: 1) $2 = token '=' (5.8: ) $3 = nterm exp (5.10: 1) @@ -156452,15 +156961,16 @@ Stack now 0 6 8 Next token is token '\n' (5.11-6.0: ) Shifting token '\n' (5.11-6.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (5.1-10: 1) $2 = token '\n' (5.11-6.0: ) -> $$ = nterm line (5.1-6.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-5.0: ) $2 = nterm line (5.1-6.0: ) -> $$ = nterm input (1.1-6.0: ) @@ -156471,12 +156981,12 @@ Shifting token '\n' (6.1-7.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (6.1-7.0: ) -> $$ = nterm line (6.1-7.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-6.0: ) $2 = nterm line (6.1-7.0: ) -> $$ = nterm input (1.1-7.0: ) @@ -156502,28 +157012,30 @@ Shifting token number (7.4: 1) Entering state 1 Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (7.4: 1) -> $$ = nterm exp (7.4: 1) Entering state 10 Stack now 0 6 2 2 2 10 Reading a token Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): +LAC: initial context established for '=' +LAC: checking lookahead '=': R11 G10 R11 G10 R11 G8 S18 +Reducing stack by rule 11 (line 115): $1 = token '-' (7.3: ) $2 = nterm exp (7.4: 1) -> $$ = nterm exp (7.3-4: -1) Entering state 10 Stack now 0 6 2 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): +Reducing stack by rule 11 (line 115): $1 = token '-' (7.2: ) $2 = nterm exp (7.3-4: -1) -> $$ = nterm exp (7.2-4: 1) Entering state 10 Stack now 0 6 2 10 Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): +Reducing stack by rule 11 (line 115): $1 = token '-' (7.1: ) $2 = nterm exp (7.2-4: 1) -> $$ = nterm exp (7.1-4: -1) @@ -156531,6 +157043,7 @@ Stack now 0 6 8 Next token is token '=' (7.6: ) Shifting token '=' (7.6: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -156543,21 +157056,23 @@ Shifting token number (7.9: 1) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (7.9: 1) -> $$ = nterm exp (7.9: 1) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (7.10-8.0: ) -Reducing 
stack by rule 11 (line 102): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (7.8: ) $2 = nterm exp (7.9: 1) -> $$ = nterm exp (7.8-9: -1) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (7.1-4: -1) $2 = token '=' (7.6: ) $3 = nterm exp (7.8-9: -1) @@ -156566,15 +157081,16 @@ Stack now 0 6 8 Next token is token '\n' (7.10-8.0: ) Shifting token '\n' (7.10-8.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (7.1-9: -1) $2 = token '\n' (7.10-8.0: ) -> $$ = nterm line (7.1-8.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-7.0: ) $2 = nterm line (7.1-8.0: ) -> $$ = nterm input (1.1-8.0: ) @@ -156585,12 +157101,12 @@ Shifting token '\n' (8.1-9.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (8.1-9.0: ) -> $$ = nterm line (8.1-9.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-8.0: ) $2 = nterm line (8.1-9.0: ) -> $$ = nterm input (1.1-9.0: ) @@ -156601,7 +157117,7 @@ Shifting token number (9.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.1: 1) -> $$ = nterm exp (9.1: 1) Entering state 8 @@ -156616,14 +157132,16 @@ Shifting token number (9.5: 2) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.5: 2) -> $$ = nterm exp (9.5: 2) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for '-' +LAC: checking lookahead '-': R8 G8 S19 +Reducing stack by rule 8 (line 104): $1 = nterm exp (9.1: 1) $2 = token '-' (9.3: ) $3 = nterm exp (9.5: 2) @@ -156632,6 +157150,7 @@ Stack now 0 6 8 Next token is token '-' (9.7: ) Shifting token '-' (9.7: ) +LAC: initial context discarded due to shift Entering state 19 Stack now 0 6 8 19 Reading a token @@ -156639,14 +157158,16 @@ Shifting token number (9.9: 3) Entering state 1 Stack now 0 6 8 19 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.9: 3) -> $$ = nterm exp (9.9: 3) Entering state 28 Stack now 0 6 8 19 28 Reading a token Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): $1 = nterm exp (9.1-5: -1) $2 = token '-' (9.7: ) $3 = nterm exp (9.9: 3) @@ -156655,6 +157176,7 @@ Stack now 0 6 8 Next token is token '=' (9.11: ) Shifting token '=' (9.11: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -156667,21 +157189,23 @@ Shifting token number (9.14: 4) Entering state 1 Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (9.14: 4) -> $$ = nterm exp (9.14: 4) Entering state 10 Stack now 0 6 8 18 2 10 Reading a token Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): +LAC: initial context 
established for '\n' +LAC: checking lookahead '\n': R11 G27 R6 G8 S24 +Reducing stack by rule 11 (line 115): $1 = token '-' (9.13: ) $2 = nterm exp (9.14: 4) -> $$ = nterm exp (9.13-14: -4) Entering state 27 Stack now 0 6 8 18 27 Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): +Reducing stack by rule 6 (line 93): $1 = nterm exp (9.1-9: -4) $2 = token '=' (9.11: ) $3 = nterm exp (9.13-14: -4) @@ -156690,15 +157214,16 @@ Stack now 0 6 8 Next token is token '\n' (9.15-10.0: ) Shifting token '\n' (9.15-10.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (9.1-14: -4) $2 = token '\n' (9.15-10.0: ) -> $$ = nterm line (9.1-10.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-9.0: ) $2 = nterm line (9.1-10.0: ) -> $$ = nterm input (1.1-10.0: ) @@ -156709,7 +157234,7 @@ Shifting token number (10.1: 1) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.1: 1) -> $$ = nterm exp (10.1: 1) Entering state 8 @@ -156729,7 +157254,7 @@ Shifting token number (10.6: 2) Entering state 1 Stack now 0 6 8 19 4 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.6: 2) -> $$ = nterm exp (10.6: 2) Entering state 12 @@ -156744,14 +157269,16 @@ Shifting token number (10.10: 3) Entering state 1 Stack now 0 6 8 19 4 12 19 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.10: 3) -> $$ = nterm exp (10.10: 3) Entering state 28 Stack now 0 6 8 19 4 12 19 28 Reading a token Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for ')' +LAC: checking lookahead ')': R8 G12 S26 +Reducing stack by rule 8 (line 104): $1 = nterm exp (10.6: 2) $2 = token '-' (10.8: ) $3 = nterm exp (10.10: 3) @@ -156760,9 +157287,10 @@ Stack now 0 6 8 19 4 12 Next token is token ')' (10.11: ) Shifting token ')' (10.11: ) +LAC: initial context discarded due to shift Entering state 26 Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): +Reducing stack by rule 13 (line 117): $1 = token '(' (10.5: ) $2 = nterm exp (10.6-10: -1) $3 = token ')' (10.11: ) @@ -156771,7 +157299,9 @@ Stack now 0 6 8 19 28 Reading a token Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): +LAC: initial context established for '=' +LAC: checking lookahead '=': R8 G8 S18 +Reducing stack by rule 8 (line 104): $1 = nterm exp (10.1: 1) $2 = token '-' (10.3: ) $3 = nterm exp (10.5-11: -1) @@ -156780,6 +157310,7 @@ Stack now 0 6 8 Next token is token '=' (10.13: ) Shifting token '=' (10.13: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -156787,14 +157318,16 @@ Shifting token number (10.15: 2) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (10.15: 2) -> $$ = nterm exp (10.15: 2) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (10.1-11: 2) $2 = token '=' (10.13: ) $3 = nterm exp (10.15: 2) @@ -156803,15 +157336,16 @@ Stack now 0 6 8 Next token 
is token '\n' (10.16-11.0: ) Shifting token '\n' (10.16-11.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (10.1-15: 2) $2 = token '\n' (10.16-11.0: ) -> $$ = nterm line (10.1-11.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-10.0: ) $2 = nterm line (10.1-11.0: ) -> $$ = nterm input (1.1-11.0: ) @@ -156822,12 +157356,12 @@ Shifting token '\n' (11.1-12.0: ) Entering state 3 Stack now 0 6 3 -Reducing stack by rule 3 (line 74): +Reducing stack by rule 3 (line 87): $1 = token '\n' (11.1-12.0: ) -> $$ = nterm line (11.1-12.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-11.0: ) $2 = nterm line (11.1-12.0: ) -> $$ = nterm input (1.1-12.0: ) @@ -156838,7 +157372,7 @@ Shifting token number (12.1: 2) Entering state 1 Stack now 0 6 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.1: 2) -> $$ = nterm exp (12.1: 2) Entering state 8 @@ -156853,7 +157387,7 @@ Shifting token number (12.3: 2) Entering state 1 Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.3: 2) -> $$ = nterm exp (12.3: 2) Entering state 32 @@ -156868,14 +157402,16 @@ Shifting token number (12.5: 3) Entering state 1 Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.5: 3) -> $$ = nterm exp (12.5: 3) Entering state 32 Stack now 0 6 8 23 32 23 32 Reading a token Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G32 R12 G8 S18 +Reducing stack by rule 12 (line 116): $1 = nterm exp (12.3: 2) $2 = token '^' (12.4: ) $3 = nterm exp (12.5: 3) @@ -156883,7 +157419,7 @@ Entering state 32 Stack now 0 6 8 23 32 Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): +Reducing stack by rule 12 (line 116): $1 = nterm exp (12.1: 2) $2 = token '^' (12.2: ) $3 = nterm exp (12.3-5: 8) @@ -156892,6 +157428,7 @@ Stack now 0 6 8 Next token is token '=' (12.7: ) Shifting token '=' (12.7: ) +LAC: initial context discarded due to shift Entering state 18 Stack now 0 6 8 18 Reading a token @@ -156899,14 +157436,16 @@ Shifting token number (12.9-11: 256) Entering state 1 Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): +Reducing stack by rule 5 (line 92): $1 = token number (12.9-11: 256) -> $$ = nterm exp (12.9-11: 256) Entering state 27 Stack now 0 6 8 18 27 Reading a token Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): $1 = nterm exp (12.1-5: 256) $2 = token '=' (12.7: ) $3 = nterm exp (12.9-11: 256) @@ -156915,146 +157454,1152 @@ Stack now 0 6 8 Next token is token '\n' (12.12-13.0: ) Shifting token '\n' (12.12-13.0: ) +LAC: initial context discarded due to shift Entering state 24 Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): +Reducing stack by rule 4 (line 88): $1 = nterm exp (12.1-11: 256) $2 = token '\n' (12.12-13.0: ) -> $$ = nterm line (12.1-13.0: ) Entering state 17 Stack now 0 6 17 -Reducing stack by rule 2 (line 70): +Reducing stack by rule 2 (line 83): $1 = nterm input (1.1-12.0: ) $2 = 
nterm line (12.1-13.0: ) -> $$ = nterm input (1.1-13.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 -Stack now 0 6 4 +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Stack now 0 6 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Stack now 0 6 4 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Stack now 0 6 4 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Stack now 0 6 4 12 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Stack now 0 6 4 12 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Stack now 0 6 4 12 23 32 +Reading a token +Next token is token ')' (13.5: ) +LAC: initial context established for ')' +LAC: checking lookahead ')': R12 G12 S26 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Stack now 0 6 4 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +LAC: initial context discarded due to shift +Entering state 26 +Stack now 0 6 4 12 26 +Reducing stack by rule 13 (line 117): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Stack now 0 6 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Stack now 0 6 8 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Stack now 0 6 8 23 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Stack now 0 6 8 23 32 +Reading a token +Next token is token '=' (13.9: ) +LAC: initial context established for '=' +LAC: checking lookahead '=': R12 G8 S18 +Reducing stack by rule 12 (line 116): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +LAC: initial context discarded due to shift +Entering state 18 +Stack now 0 6 8 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Stack now 0 6 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Stack now 0 6 8 18 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +LAC: initial context established for '\n' +LAC: checking lookahead '\n': R6 G8 S24 +Reducing stack by rule 6 (line 93): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Stack now 0 6 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +LAC: initial context discarded due to shift +Entering state 24 +Stack now 0 6 8 24 +Reducing stack by rule 4 (line 88): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Stack now 0 6 17 +Reducing stack by rule 2 (line 83): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input 
(1.1-14.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of file (14.1: ) +Shifting token end of file (14.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of file (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1445: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1435: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: cat stderr +./calc.at:1445: cat stderr +./calc.at:1443: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1454: cat stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 2 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1453: cat stderr +stderr: +./calc.at:1435: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +input: +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +533. calc.at:1445: ok +input: + | + | +1 +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1446: cat stderr +stderr: +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) 
+ (1 2) = 1 +input: +./calc.at:1454: $PREPARSER ./calc input +input: +input: +stderr: + | 1//2 +./calc.at:1446: $PREPARSER ./calc input +527. calc.at:1435: | (- *) + (1 2) = 1 +./calc.at:1443: $PREPARSER ./calc input + ok +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token number (1.3: 2) +LAC: initial context established for number +LAC: checking lookahead number: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: Err +LAC: checking lookahead '=': S18 +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token number (1.3: 2) +Stack now 0 +stderr: +stderr: +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +stderr: +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: +Starting parse 
+Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: + +./calc.at:1455: cat stderr +stderr: +./calc.at:1457: cat stderr +./calc.at:1448: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.11: syntax error, unexpected number +1.1-16: error: 2222 != 1 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + +stderr: +./calc.at:1457: $PREPARSER ./calc /dev/null +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stdout: +stderr: +stderr: +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1453: "$PERL" -pi 
-e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | 1//2 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1449: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1454: cat stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: cat stderr +./calc.at:1446: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +LAC: initial context established for '/' +LAC: checking lookahead '/': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +stderr: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +input: +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1449: $PREPARSER ./calc input + | error +./calc.at:1446: $PREPARSER ./calc input +input: +stderr: +input: +./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + #) = 1111 +./calc.at:1453: $PREPARSER ./calc input +stderr: +stderr: + | (- *) + (1 2) = 1 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '/' (1.3: ) +LAC: initial context established for '/' +LAC: checking lookahead '/': Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': Err +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '/' (1.3: ) +Stack now 0 +./calc.at:1454: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +stderr: +1.6: syntax error: invalid character: '#' +input: +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: cat stderr +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1 2 +./calc.at:1448: $PREPARSER ./calc input +546. calc.at:1476: testing Calculator C++ %glr-parser ... +./calc.at:1476: mv calc.y.tmp calc.y + +./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +1.6: syntax error: invalid character: '#' +stderr: +stderr: +stderr: +stderr: +input: +input: +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (* *) + (*) + (*) +1.3: syntax error +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1457: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +./calc.at:1443: $PREPARSER ./calc input +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.12: syntax error, unexpected number +1.1-17: error: 2222 != 1 +./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr +547. calc.at:1476: testing Calculator glr2.cc ... +./calc.at:1455: cat stderr +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: mv calc.y.tmp calc.y + +stderr: +./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1453: cat stderr +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +stderr: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +error: 4444 != 1 +input: +stderr: + | (# + 1) = 1111 +input: +./calc.at:1453: $PREPARSER ./calc input +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 0 6 8 23 +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 +Next token is 
token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 -Stack now 0 6 8 18 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 -Stack now 0 6 8 18 27 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: cat stderr -stderr: -stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -157064,8 +158609,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: "$PERL" -pi -e 'use strict; 
+./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -157075,6 +158619,77 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1446: cat stderr + | error +./calc.at:1455: $PREPARSER ./calc input + | 1 2 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1454: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +stderr: +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1448: cat stderr +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +stderr: +stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1457: cat stderr +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token invalid token (1.1: ) +LAC: initial context established for invalid token +LAC: checking lookahead invalid token: Err +LAC: checking lookahead end of file: Err +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +LAC: checking lookahead ')': Err +LAC: checking lookahead '!': S5 +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +Stack now 0 +input: + | (* *) + (*) + (*) +./calc.at:1454: $PREPARSER ./calc input + | 1 = 2 = 3 Starting parse Entering state 0 Stack now 0 @@ -157226,11 +158841,189 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error, unexpected end of input -./calc.at:1449: cat stderr stderr: -./calc.at:1445: cat stderr - +./calc.at:1446: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' + | 1//2 +stderr: +./calc.at:1448: $PREPARSER ./calc input +stderr: +stderr: +1.3: syntax error +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.3: syntax error +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: cat stderr +input: +stderr: +./calc.at:1443: cat stderr + | (!!) 
+ (1 2) = 1 +stderr: +./calc.at:1457: $PREPARSER ./calc input +1.3: syntax error +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +./calc.at:1476: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: +input: +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (1 + # + 1) = 1111 +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1455: cat stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1453: $PREPARSER ./calc input +stderr: +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1476: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +1.6: syntax error: invalid character: '#' Starting parse Entering state 0 Stack now 0 @@ -157275,7 +159068,7 @@ Entering state 30 Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) @@ -157283,7 +159076,7 @@ -> $$ = nterm exp (1.5-9: 6) Entering state 29 Stack now 0 8 20 29 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) @@ -157291,979 +159084,980 @@ -> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.13: 7) -Shifting token number (1.13: 7) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Stack now 0 8 18 27 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: cat stderr +stderr: +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +input: +./calc.at:1454: cat stderr +./calc.at:1446: cat stderr +1.6: syntax error: invalid character: '#' + | 1 = 2 = 3 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1448: cat stderr +input: +input: + | 1//2 +stderr: +./calc.at:1449: $PREPARSER ./calc input +stderr: +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1454: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (2.1: 1) -Shifting token number (2.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.1: 1) --> $$ = nterm exp (2.1: 1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 -Stack now 0 6 8 20 +Stack now 0 8 20 Reading a token -Next token is token number (2.5: 2) -Shifting token number (2.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 20 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 -Stack now 0 6 8 20 29 +Stack now 0 8 20 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 -Stack now 0 6 8 20 29 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Stack now 0 6 8 20 29 21 2 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (2.10: 3) -Shifting token number (2.10: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 20 29 21 2 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Stack now 0 6 8 20 29 21 2 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 -Stack now 0 6 8 20 29 21 30 -Next token is token '=' (2.12: ) +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Stack now 0 6 8 20 29 -Next token is token '=' (2.12: ) +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next 
token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (2.15: 5) -Shifting token number (2.15: 5) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Stack now 0 6 2 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (4.2: 1) -Shifting token number (4.2: 1) -Entering state 1 -Stack now 0 6 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Stack now 0 6 2 10 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Stack now 0 6 2 10 23 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (4.4: 2) -Shifting token number (4.4: 2) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 2 10 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Stack now 0 6 2 10 23 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (4.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Stack now 0 6 8 18 2 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (4.9: 1) -Shifting token number (4.9: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Stack now 0 6 4 +Stack now 0 8 18 27 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Stack now 0 6 4 2 +Next token is token '=' (1.7: ) +LAC: checking lookahead '=': Err +LAC: checking lookahead end of file: R6 G8 Err +LAC: checking lookahead number: R6 G8 Err +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +LAC: checking lookahead NEG: R6 G8 Err +LAC: checking lookahead '^': S23 +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack 
now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 + | error +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1457: cat stderr +input: +1.3: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | + | +1 +./calc.at:1446: $PREPARSER ./calc input +1.1: syntax error +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +stderr: +1.3: syntax error +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (5.3: 1) -Shifting token number (5.3: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 4 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Stack now 0 6 4 2 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Stack now 0 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Stack now 0 6 8 23 -Reading a token -Next token is token number (5.6: 2) -Shifting token number (5.6: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Stack now 0 6 8 23 32 +Stack now 0 8 Reading a token -Next token is token '=' (5.8: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token -Next token is token number (5.10: 1) -Shifting token number (5.10: 1) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (5.10: 1) --> $$ = nterm exp (5.10: 1) +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 92): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (5.11-6.0: ) -Shifting 
token '\n' (5.11-6.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '=' (1.7: ) +LAC: checking lookahead '=': Err +LAC: checking lookahead end of file: R6 G8 Err +LAC: checking lookahead number: R6 G8 Err +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S19 +LAC: checking lookahead '+': S20 +LAC: checking lookahead '*': S21 +LAC: checking lookahead '/': S22 +LAC: checking lookahead NEG: R6 G8 Err +LAC: checking lookahead '^': S23 +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Stack now 0 8 18 +Error: popping token '=' (1.3: ) +Stack now 0 8 +Error: popping nterm exp (1.1: 1) +Stack now 0 +Cleanup: discarding lookahead token '=' (1.7: ) +Stack now 0 +input: +stderr: +stderr: +stderr: +./calc.at:1451: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +./calc.at:1453: cat stderr +input: +./calc.at:1454: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | (- *) + (1 2) = 1 +./calc.at:1457: $PREPARSER ./calc input +1.1: syntax error +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) Entering state 3 -Stack now 0 6 3 +Stack now 0 3 Reducing stack by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Stack now 0 6 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Stack now 0 6 2 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Stack now 0 6 2 2 2 -Reading a token -Next token is token number (7.4: 1) -Shifting token number (7.4: 1) -Entering state 1 -Stack now 0 6 2 2 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Stack now 0 6 2 2 2 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Stack now 0 6 2 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Stack now 0 6 2 10 -Next token is token '=' (7.6: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Stack now 0 6 8 18 2 -Reading a token -Next token is token number (7.9: 1) -Shifting token number (7.9: 1) -Entering 
state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Stack now 0 6 8 18 2 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 + | 1 + 2 * 3 + !- ++ +stderr: +./calc.at:1443: $PREPARSER ./calc input +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1454: $PREPARSER ./calc input + | (1 + 1) / (1 - 1) +./calc.at:1453: $PREPARSER ./calc input +stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1451: $PREPARSER ./calc input +stderr: +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (9.1: 1) -Shifting token number (9.1: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 +Stack now 0 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (9.5: 2) -Shifting token number (9.5: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '-' (9.7: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Stack now 0 6 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Stack now 0 6 8 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (9.9: 3) -Shifting token number (9.9: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 19 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (9.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = 
nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Stack now 0 6 8 18 2 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token number (9.14: 4) -Shifting token number (9.14: 4) -Entering state 1 -Stack now 0 6 8 18 2 1 -Reducing stack by rule 5 (line 79): - $1 = token number (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 -Stack now 0 6 8 18 2 10 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +stderr: +stderr: +stderr: +./calc.at:1449: cat stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Stack now 0 6 8 18 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (10.1: 1) -Shifting token number (10.1: 1) +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +stderr: +1.11-17: error: null divisor +./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +error: 2222 != 1 +./calc.at:1455: cat stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +1.11-17: error: null divisor +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + | error +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +./calc.at:1448: cat stderr +./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr +input: +./calc.at:1454: cat stderr +1.1: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 -Stack now 0 6 1 +Stack now 0 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Stack now 0 6 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Stack now 0 6 8 19 +Stack now 0 8 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 -Stack now 0 6 8 19 4 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (10.6: 2) -Shifting token number (10.6: 2) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 6 8 19 4 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Stack now 0 6 8 19 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Stack now 0 6 8 19 4 12 19 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 Reading a token -Next token is token number (10.10: 3) -Shifting token number (10.10: 3) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 -Stack now 0 6 8 19 4 12 19 1 +Stack now 0 8 20 29 21 1 Reducing stack by rule 5 (line 79): - $1 = token number (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Stack now 0 6 8 19 4 12 19 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Stack now 0 6 8 19 4 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Stack now 0 6 8 19 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Stack now 0 6 8 19 28 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 Reading a token -Next token is token '=' (10.13: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token 
'*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (10.15: 2) -Shifting token number (10.15: 2) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Stack now 0 6 8 18 27 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) + | + | +1 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1453: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1446: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1457: $PREPARSER ./calc input +stderr: +input: +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1453: cat stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Stack now 0 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Next token is token '+' (2.1: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +LAC: checking lookahead end of file: S16 +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: + | 1 = 2 = 3 +stderr: + | 1 2 +./calc.at:1451: $PREPARSER ./calc input +input: +./calc.at:1446: $PREPARSER ./calc /dev/null +./calc.at:1448: $PREPARSER ./calc input +1.1: syntax error + | (#) + (#) = 2222 +./calc.at:1454: $PREPARSER ./calc input +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +1.7: syntax error +stderr: +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) Entering state 3 -Stack now 0 6 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) +Stack now 0 3 +Reducing stack by rule 3 (line 87): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 82): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token number (12.1: 2) -Shifting token number (12.1: 2) -Entering state 1 -Stack now 0 6 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Stack now 0 6 8 +Next token is token '+' (2.1: ) +LAC: initial context established for '+' +LAC: checking lookahead '+': Err +LAC: checking lookahead end of file: S16 +LAC: checking lookahead number: S1 +LAC: checking lookahead '=': Err +LAC: checking lookahead '-': S2 +LAC: 
checking lookahead '+': Err +LAC: checking lookahead '*': Err +LAC: checking lookahead '/': Err +LAC: checking lookahead NEG: Err +LAC: checking lookahead '^': Err +LAC: checking lookahead '\n': S3 +LAC: checking lookahead '(': S4 +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Stack now 0 +Cleanup: discarding lookahead token '+' (2.1: ) +Stack now 0 +1.3: syntax error +./calc.at:1443: cat stderr +stderr: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Stack now 0 6 8 23 +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +stderr: +538. calc.at:1453: ok +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.3: syntax error +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: cat stderr +input: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token number (12.3: 2) -Shifting token number (12.3: 2) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Stack now 0 6 8 23 32 +Next token is token end of input (1.1: ) +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +Stack now 0 +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | (#) + (#) = 2222 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1455: cat stderr + +input: +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: cat stderr +./calc.at:1455: $PREPARSER ./calc /dev/null +./calc.at:1448: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1457: $PREPARSER ./calc input +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Stack now 0 6 8 23 32 23 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token number (12.5: 3) -Shifting token number (12.5: 3) -Entering state 1 -Stack now 0 6 8 23 32 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Stack now 0 6 8 23 32 23 32 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Stack now 0 6 8 23 32 -Next token is token '=' (12.7: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Stack now 0 6 8 18 -Reading a token -Next token is token number (12.9-11: 256) -Shifting token number (12.9-11: 256) -Entering state 1 -Stack now 0 6 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Stack now 0 6 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token 
'(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 -Stack now 0 6 4 -Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) -Entering state 1 -Stack now 0 6 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Stack now 0 6 4 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Stack now 0 6 4 12 23 -Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) -Entering state 1 -Stack now 0 6 4 12 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Stack now 0 6 4 12 23 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Stack now 0 6 4 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Stack now 0 6 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Stack now 0 6 8 +Stack now 0 8 20 4 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Stack now 0 6 8 23 +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) -Entering state 1 -Stack now 0 6 8 23 1 -Reducing stack by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Stack now 0 6 8 23 32 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (13.9: ) -Reducing stack by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 -Stack now 0 6 8 18 +Stack now 0 8 18 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 -Stack now 0 6 8 18 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 -Stack now 0 6 8 18 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 
6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Stack now 0 6 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 -Stack now 0 6 8 24 +Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Stack now 0 6 17 -Reducing stack by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input (14.1: ) -Shifting token end of input (14.1: ) +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1457: cat stderr -./calc.at:1455: $PREPARSER ./calc /dev/null -./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr -input: +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (# + 1) = 1111 +stderr: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: cat stderr +input: Starting parse Entering state 0 Stack now 0 @@ -158287,151 +160081,147 @@ 1.1: syntax error, unexpected end of file Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 -./calc.at:1445: $PREPARSER ./calc input -input: -./calc.at:1451: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1449: $PREPARSER ./calc input -input: ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1448: $PREPARSER ./calc input input: stderr: stderr: - | 1 2 -./calc.at:1446: $PREPARSER ./calc input -stderr: - | 1 = 2 = 3 +input: +input: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 -Stack now 0 4 11 25 +Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1.13-16: 2222) +Shifting token number 
(1.13-16: 2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1457: $PREPARSER ./calc input -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: cat stderr -stdout: -stderr: -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | 1 = 2 = 3 +./calc.at:1446: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1454: $PREPARSER ./calc input +./calc.at:1449: $PREPARSER ./calc input stderr: stderr: -./calc.at:1453: cat stderr -./calc.at:1458: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - +./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -158455,137 +160245,336 @@ 1.1: syntax error, unexpected end of file Cleanup: discarding lookahead token end of file (1.1: ) Stack now 0 - +./calc.at:1451: cat stderr +2.1: syntax error +stderr: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -stderr: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 -Stack now 0 4 +Stack now 0 8 20 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () -Stack now 0 4 -Shifting token error () +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 -Stack now 0 4 11 25 +Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting 
token '+' (1.30: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) +Entering state 1 +Stack now 0 8 20 4 12 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Stack now 0 8 20 4 12 21 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.47-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line 
(1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1455: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1443: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -158595,8 +160584,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1449: "$PERL" -pi -e 'use strict; +stderr: +2.1: syntax error +input: +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -158606,164 +160597,377 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1//2 ./calc.at:1451: $PREPARSER ./calc input +1.7: syntax error stderr: -./calc.at:1443: $PREPARSER ./calc input -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) input: stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -input: -./calc.at:1455: cat stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token number (1.3: 2) -Stack now 0 -./calc.at:1458: $PREPARSER ./calc input -1.11: syntax error -1.1-16: error: 2222 != 1 -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 -Stack now 0 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) +Entering state 1 +Stack now 0 8 20 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Stack now 0 8 20 4 12 20 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Stack now 0 8 20 4 12 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' (1.17: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.7-15: 3) +Stack now 0 8 20 4 +Shifting token error (1.7-18: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 Stack now 0 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Error: popping token error (1.23: ) +Stack now 0 8 20 4 +Shifting token error (1.23-25: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Error: popping token error (1.23-25: ) +Stack now 0 8 20 4 +Shifting token error (1.23-27: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 -Stack now 0 8 20 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) Entering state 21 -Stack now 0 8 20 29 21 +Stack now 0 8 20 4 12 21 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 -Stack now 0 8 20 29 21 1 +Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 
(line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) Entering state 30 -Stack now 0 8 20 29 21 30 +Stack now 0 8 20 4 12 21 30 Reading a token -Next token is token '+' (1.11: ) +Next token is token '*' (1.39: ) Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Stack now 0 8 20 4 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Stack now 0 8 20 4 12 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Stack now 0 8 20 4 12 +Error: popping nterm exp (1.33-37: 2) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Error: popping token error (1.33-41: ) +Stack now 0 8 20 4 +Shifting token error (1.33-41: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '=' (1.44: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1449: cat stderr -547. calc.at:1476: testing Calculator glr2.cc ... -input: -stderr: -stderr: -./calc.at:1476: mv calc.y.tmp calc.y - - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1455: $PREPARSER ./calc input -input: - | (!!) + (1 2) = 1 -./calc.at:1449: $PREPARSER ./calc input -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Next token is token '\n' (1.47-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) 1.1-46: error: 4444 != 1 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +548. calc.at:1477: testing Calculator C++ %glr-parser %locations ... +./calc.at:1477: mv calc.y.tmp calc.y + +./calc.at:1443: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1455: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1457: $PREPARSER ./calc input +1.3: syntax error +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: cat stderr +./calc.at:1446: cat stderr stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1445: "$PERL" -pi -e 'use strict; +1.3: syntax error +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -158773,7 +160977,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1446: "$PERL" -pi -e 'use strict; +input: +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + #) = 1111 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1448: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1455: $PREPARSER ./calc input +input: +input: + | (# + 1) = 1111 +./calc.at:1454: $PREPARSER ./calc input +stderr: +./calc.at:1448: $PREPARSER ./calc /dev/null + | (!!) + (1 2) = 1 +stderr: +./calc.at:1446: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -158783,6 +161006,105 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) 
+Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 @@ -159068,9 +161390,7 @@ Entering state 1 Stack now 0 8 20 4 12 21 1 Reducing stack by rule 5 (line 92): - $1.11: syntax error -1.1-16: error: 2222 != 1 -1 = token number (1.37: 2) + $1 = token number (1.37: 2) -> $$ = nterm exp (1.37: 2) Entering state 30 Stack now 0 8 20 4 12 21 30 @@ -159200,127 +161520,260 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +1.1: syntax error +1.2: syntax error: invalid character: '#' +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.1-46: error: 4444 != 1 -stderr: -stdout: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: cat stderr stderr: -./calc.at:1457: cat stderr -1.11: syntax error -1.1-16: error: 2222 != 1 +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Stack now 0 4 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Stack now 0 4 +Shifting token error (1.2-3: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) Entering state 1 -Stack now 0 8 20 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Stack now 0 8 20 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 20 29 21 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) 
+Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +1.2: syntax error: invalid character: '#' +stderr: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 -Stack now 0 8 20 +Stack now 0 4 12 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1443: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1445: cat stderr -input: - | (1 + # + 1) = 1111 -stderr: +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1451: cat stderr stderr: -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1468: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -159735,130 +162188,148 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1446: cat stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 4 12 20 +Next token is token '!' (1.2: ) +Shifting token '!' 
(1.2: ) +Entering state 5 +Stack now 0 4 5 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Stack now 0 4 12 -Error: popping nterm exp (1) -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Stack now 0 4 5 15 +Reducing stack by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-3: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.9: 1) +Shifting token number (1.9: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 +Next token is token number (1.11: 2) +Error: discarding token number (1.11: 2) +Error: popping token error (1.9-11: ) +Stack now 0 8 20 4 +Shifting token error (1.9-11: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 -Stack now 0 4 11 25 +Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next 
token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: -548. calc.at:1477: testing Calculator C++ %glr-parser %locations ... -./calc.at:1477: mv calc.y.tmp calc.y - -./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1449: cat stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1454: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -159868,7 +162339,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1451: "$PERL" -pi -e 'use strict; +input: +./calc.at:1457: cat stderr +1.1: syntax error + | + | +1 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1454: cat stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -159878,6 +162356,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1477: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: input: ./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -159889,397 +162370,397 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1443: $PREPARSER ./calc input -input: -input: +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | error +./calc.at:1451: $PREPARSER ./calc input +2.1: syntax error + | (#) + (#) = 2222 +./calc.at:1457: $PREPARSER ./calc input +stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr ./calc.at:1455: cat stderr input: - | 1//2 +input: +1.1: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr + | (- *) + (1 2) = 1 ./calc.at:1446: $PREPARSER ./calc input - | 1 2 - | - | +1 - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1458: $PREPARSER ./calc input -./calc.at:1453: cat stderr -stderr: -./calc.at:1468: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -stderr: stderr: -input: + | (1 + # + 1) = 1111 +./calc.at:1454: $PREPARSER ./calc input stderr: stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.6: syntax error: invalid character: '#' +./calc.at:1448: cat stderr +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +2.1: syntax error +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: - | (- *) + (1 2) = 1 -input: -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1451: cat stderr -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 4 12 20 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Stack now 0 4 12 -Error: popping nterm exp (1) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 -Next token is token invalid token () -Error: discarding token invalid token () -Error: popping token error () +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) Stack now 0 4 -Shifting token error () +Shifting token error (1.2-4: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' () -Error: discarding token '+' () -Error: popping token error () -Stack now 0 4 -Shifting token error () -Entering state 11 -Stack now 0 4 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (1) -Error: discarding token number (1) -Error: popping token error () -Stack now 0 4 -Shifting token error () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) +Entering state 1 +Stack now 0 8 20 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 +Reading a token +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) Entering state 11 -Stack now 0 4 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 -Stack now 0 4 11 25 +Stack now 0 8 20 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.18-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1111) 
- $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Stack now 0 6 Reading a token -Next token is token end of input () -Shifting token end of input () +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) Entering state 16 Stack now 0 6 16 Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (!!) + (1 2) = 1 +stderr: +input: +stderr: ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1453: $PREPARSER ./calc input +1.1: syntax error + | (# + 1) = 1111 +input: stderr: + | (!!) + (1 2) = 1 +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1455: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1448: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' stderr: stderr: +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Stack now 0 8 22 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Stack now 0 4 2 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '/' (1.3: ) -Stack now 0 -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Stack now 0 4 2 9 +Reducing stack by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2-4: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Stack now 0 8 20 Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) +Next token is token number (1.10: 1) +Shifting token number (1.10: 1) Entering state 1 -Stack now 0 8 20 29 21 1 +Stack now 0 8 20 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 + $1 = token number (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Stack now 0 8 20 4 12 Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' 
(1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token number (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token number (1.12: 2) +Error: discarding token number (1.12: 2) +Error: popping token error (1.10-12: ) +Stack now 0 8 20 4 +Shifting token error (1.10-12: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) Entering state 29 Stack now 0 8 20 29 -Next token is token '+' (1.11: ) +Reading a token +Next token is token '=' (1.15: ) Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 +Next token is token number (1.17: 1) +Shifting token number (1.17: 1) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -1.4: syntax error -1.12: syntax error +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) 1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 @@ -160441,21 +162922,125 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + 
$3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: cat stderr stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +./calc.at:1451: cat stderr +./calc.at:1449: $PREPARSER ./calc /dev/null +./calc.at:1454: cat stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error, unexpected number -1.1-16: error: 2222 != 1 +./calc.at:1457: cat stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 Starting parse Entering state 0 Stack now 0 @@ -160615,59 +163200,9 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (- *) + (1 2) = 1 -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: $PREPARSER ./calc input -input: -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1458: cat stderr - | 1 2 stderr: -./calc.at:1446: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: $PREPARSER ./calc input -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1445: cat stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; +1.1: syntax error +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -160677,14 +163212,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1476: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1457: cat stderr -input: -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -160695,222 +163224,15 @@ }eg ' expout || exit 77 input: -input: - | 1//2 - | error -./calc.at:1446: $PREPARSER ./calc input -stderr: -./calc.at:1458: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -stderr: -stderr: -./calc.at:1453: cat stderr -./calc.at:1445: $PREPARSER ./calc input -./calc.at:1457: $PREPARSER ./calc /dev/null -./calc.at:1443: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 -./calc.at:1477: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -stderr: -stdout: -syntax error stderr: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token 
number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' () -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' () -Reducing stack by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input () -Shifting token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1455: cat stderr -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1459: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - input: - | (- *) + (1 2) = 1 - | (#) + (#) = 2222 -./calc.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: $PREPARSER ./calc input -./calc.at:1453: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1449: cat stderr -stderr: -stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -Stack now 0 + | 1 = 2 = 3 input: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1454: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1457: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -160933,91 +163255,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) +Stack now 0 4 11 +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next 
token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -161034,163 +163329,11 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' () -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' () -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' () -Reducing stack by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input () -Shifting 
token end of input () -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; +1.11-17: error: null divisor +./calc.at:1454: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -161200,141 +163343,79 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1459: $PREPARSER ./calc input stderr: -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.12: syntax error, unexpected number -1.1-17: error: 2222 != 1 -./calc.at:1455: $PREPARSER ./calc input stderr: +1.1: syntax error +1.7: syntax error +syntax error: invalid character: '#' +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: cat stderr +stderr: +./calc.at:1446: cat stderr stderr: +./calc.at:1455: cat stderr +syntax error: invalid character: '#' +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = 
nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11-17: error: null divisor +1.7: syntax error +input: +./calc.at:1443: cat stderr +input: + | (!!) + (1 2) = 1 +input: +./calc.at:1448: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1455: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1457: cat stderr +./calc.at:1446: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1454: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -161521,69 +163602,446 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1449: cat stderr ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (* *) + (*) + (*) -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1458: cat stderr + | (1 + # + 1) = 1111 stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1443: $PREPARSER ./calc input +./calc.at:1454: cat stderr ./calc.at:1451: cat stderr -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1445: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1468: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: + | (# + 1) = 1111 +./calc.at:1457: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Error: popping token error (1.2: ) +Stack now 0 4 +Shifting token error (1.2-4: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Error: popping token error (1.10: ) +Stack now 0 8 20 4 +Shifting token error (1.10: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Error: popping token error (1.16: ) +Stack now 0 8 20 4 +Shifting token error (1.16: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +539. 
calc.at:1454: ok +stderr: +input: +input: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | + | +1 Starting parse Entering state 0 Stack now 0 @@ -161770,23 +164228,8 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1457: cat stderr -stderr: -./calc.at:1453: cat stderr -./calc.at:1445: cat stderr - | error -./calc.at:1443: cat stderr -./calc.at:1458: $PREPARSER ./calc input -./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr -stderr: -1.2: syntax error 
-1.10: syntax error -1.16: syntax error -stderr: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1446: cat stderr -./calc.at:1455: "$PERL" -pi -e 'use strict; +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -161796,16 +164239,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -input: -input: -./calc.at:1451: $PREPARSER ./calc input -./calc.at:1455: cat stderr -533. calc.at:1445: ok -./calc.at:1449: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -161815,28 +164254,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | (* *) + (*) + (*) -./calc.at:1453: $PREPARSER ./calc input -input: - | (1 + #) = 1111 -./calc.at:1443: $PREPARSER ./calc input -./calc.at:1468: cat stderr stderr: stderr: -./calc.at:1457: $PREPARSER ./calc input -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | 1 2 +2.1: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: cat stderr +./calc.at:1446: cat stderr stderr: +./calc.at:1455: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1459: $PREPARSER ./calc input -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error +input: +syntax error: invalid character: '#' +stderr: + | (- *) + (1 2) = 1 +./calc.at:1448: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -161878,48 +164324,64 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -161936,154 +164398,237 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -stderr: -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -input: -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 stderr: -stderr: -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: cat stderr +2.1: syntax error +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 input: - | 1 = 2 = 3 -stderr: -stdout: -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1443: 
sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1458: cat stderr +./calc.at:1455: cat stderr + + | 1 + 2 * 3 + !+ ++ ./calc.at:1446: $PREPARSER ./calc input stderr: -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1448: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -error: 4444 != 1 -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 4 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 -Stack 
now 0 4 12 20 +Stack now 0 8 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (* *) + (*) + (*) +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1455: $PREPARSER ./calc input +stderr: +./calc.at:1443: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1457: cat stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Stack now 0 8 20 5 14 +Reducing stack by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -162301,70 +164846,18 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.2: syntax error -1.10: syntax error -1.16: syntax error ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1451: cat stderr +./calc.at:1449: cat stderr input: + | 1 + 2 * 3 + !- ++ +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1451: $PREPARSER ./calc /dev/null stderr: - | 1 = 2 = 3 - | 1 + 2 * 3 + !+ ++ -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1458: $PREPARSER ./calc input -input: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) -Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 stderr: - +input: +1.1: syntax error +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -162582,48 +165075,255 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1448: cat stderr + | (1 + 1) / (1 - 1) stderr: -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1448: $PREPARSER ./calc input +./calc.at:1443: $PREPARSER ./calc input +input: + | (1 + # + 1) = 1111 +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Stack now 0 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Stack now 0 8 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 
2) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Stack now 0 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1457: $PREPARSER ./calc input +549. calc.at:1477: testing Calculator glr2.cc %locations ... +./calc.at:1477: mv calc.y.tmp calc.y + stderr: +1.1: syntax error +stderr: + | (!!) + (1 2) = 1 stderr: -1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line 
(1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1449: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1448: $PREPARSER ./calc input +stderr: +./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -162634,15 +165334,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1457: cat stderr -./calc.at:1443: cat stderr -./calc.at:1459: cat stderr -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: cat stderr -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Stack now 0 @@ -162657,61 +165348,93 @@ Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Stack now 0 8 20 Reading a token Next token is token number (1.5: 2) Shifting token number (1.5: 2) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 20 1 Reducing stack by rule 5 (line 79): $1 = token number (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 27 -Stack now 0 8 18 27 +Entering state 29 +Stack now 0 8 20 29 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Stack now 0 8 18 -Error: popping token '=' (1.3: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Stack now 0 8 20 29 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Stack now 0 8 20 29 21 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Stack now 0 8 20 29 21 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Stack now 0 8 20 29 +Next token is token '+' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 Stack now 0 8 -Error: popping nterm exp (1.1: 1) -Stack now 0 -Cleanup: discarding lookahead token '=' (1.7: ) -Stack now 0 -./calc.at:1455: cat stderr -./calc.at:1451: cat stderr -input: -stderr: -stderr: -input: - | 1//2 -./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Stack now 0 8 20 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Stack now 0 8 20 5 13 +Reducing stack by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Stack now 0 8 20 +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: -./calc.at:1459: $PREPARSER ./calc input stderr: -input: -./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr -syntax error - | 1 + 2 * 3 + !+ ++ -./calc.at:1453: $PREPARSER ./calc input -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) - | (# + 1) = 1111 -input: stderr: -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1443: $PREPARSER ./calc input -input: +1.2: syntax error +1.10: syntax error +1.16: syntax error stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1455: $PREPARSER ./calc input -input: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; +./calc.at:1458: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -162721,11 +165444,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1451: $PREPARSER ./calc input -stderr: - | (!!) + (1 2) = 1 -./calc.at:1457: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: cat stderr Starting parse Entering state 0 Stack now 0 @@ -162735,77 +165457,122 @@ Entering state 4 Stack now 0 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 Reading a token Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 Reading a token Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> 
$$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) + $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 22 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -162822,12 +165589,68 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: - | 1 2 +input: +input: stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +1.2: syntax error +1.10: syntax error +1.16: syntax error + | 1 + 2 * 3 + !+ ++ +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1443: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1451: cat stderr stderr: +./calc.at:1446: cat stderr stderr: +./calc.at:1457: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1443: cat stderr Starting parse Entering state 0 Stack now 0 @@ -162911,21 +165734,9 @@ Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1448: $PREPARSER ./calc input -stderr: ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1446: cat stderr -stderr: input: -./calc.at:1458: cat stderr -stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -162935,10 +165746,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.3: syntax error -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 + | (#) + (#) = 2222 +stderr: +./calc.at:1446: $PREPARSER ./calc input +input: +./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +input: +./calc.at:1451: $PREPARSER ./calc input +532. 
calc.at:1443: ok stderr: Starting parse Entering state 0 @@ -163024,10 +165840,18 @@ Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1455: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: $PREPARSER ./calc input stderr: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1448: cat stderr +./calc.at:1457: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +stderr: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -163050,64 +165874,91 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack by rule 6 (line 80): - 
$1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -163124,56 +165975,143 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input: -./calc.at:1459: cat stderr -./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr - | 1 + 2 * 3 + !- ++ input: -./calc.at:1453: $PREPARSER ./calc input -stderr: -549. calc.at:1477: testing Calculator glr2.cc %locations ... +./calc.at:1449: cat stderr + | 1 2 +error: null divisor input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1477: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1448: $PREPARSER ./calc input input: - | - | +1 -./calc.at:1477: mv calc.y.tmp calc.y - - | - | +1 -./calc.at:1446: $PREPARSER ./calc input +./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1458: $PREPARSER ./calc input -stderr: -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !- ++ +stderr: +stderr: ./calc.at:1455: $PREPARSER ./calc input stderr: -1.3: syntax error -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1457: cat stderr -./calc.at:1468: cat stderr + +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: -./calc.at:1477: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +error: null divisor Starting parse Entering state 0 Stack now 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Stack now 0 8 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Stack now 0 8 20 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Next token is token invalid token (1.8: ) +Error: discarding token invalid token (1.8: ) +Error: popping token error (1.1-8: ) +Stack now 0 8 20 4 +Shifting token error (1.1-8: ) +Entering state 11 +Stack now 0 8 20 4 11 +Reading a token +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Stack now 0 8 20 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.1-8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Stack now 0 8 20 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Stack now 0 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 
2222) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -163183,23 +166121,13 @@ Entering state 6 Stack now 0 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -input: -input: -./calc.at:1443: cat stderr -stderr: -stderr: -stderr: - | error -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | error -./calc.at:1459: $PREPARSER ./calc input +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 @@ -163284,19 +166212,34 @@ Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: $PREPARSER ./calc input -stderr: -stderr: +input: +./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +./calc.at:1446: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1449: $PREPARSER ./calc input stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1451: $PREPARSER ./calc input stderr: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) Starting parse Entering state 0 Stack now 0 @@ -163380,69 +166323,17 @@ Stack now 0 8 20 Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) - | (- *) + (1 2) = 1 -./calc.at:1457: $PREPARSER ./calc input -stderr: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + # + 1) = 1111 -./calc.at:1443: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Stack now 0 3 -Reducing stack by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Stack now 0 -Cleanup: discarding lookahead token '+' (2.1: ) -Stack now 0 -stderr: -stderr: -syntax error -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1446: cat stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1457: cat stderr +./calc.at:1451: cat stderr ./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -163453,38 +166344,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1449: cat stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1453: cat stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1448: cat stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1459: cat stderr -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -error: 2222 != 1 -./calc.at:1446: "$PERL" -pi -e 'use strict; +./calc.at:1458: cat stderr +input: +./calc.at:1455: cat stderr +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +550. calc.at:1478: testing Calculator C++ %glr-parser %locations api.location.type={Span} ... +./calc.at:1478: mv calc.y.tmp calc.y + +541. calc.at:1457: ok +input: + | (1 + #) = 1111 +./calc.at:1446: $PREPARSER ./calc input + | (!!) 
+ (1 2) = 1 +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -163494,7 +166375,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1468: "$PERL" -pi -e 'use strict; +input: + | (#) + (#) = 2222 +./calc.at:1455: $PREPARSER ./calc input +stderr: +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -163505,154 +166390,10 @@ }eg ' expout || exit 77 stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token 
-Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | (#) + (#) = 2222 -./calc.at:1449: $PREPARSER ./calc input -input: -input: -stderr: -./calc.at:1455: cat stderr | 1//2 -./calc.at:1448: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1453: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1458: cat stderr -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1451: cat stderr -./calc.at:1446: cat stderr -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -./calc.at:1468: cat stderr -./calc.at:1446: $PREPARSER ./calc /dev/null -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: +./calc.at:1458: $PREPARSER ./calc input +1.11: syntax error +1.1-16: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 @@ -163694,64 +166435,48 @@ Entering state 11 Stack now 0 4 11 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) 
- $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -163768,30 +166493,14 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) - | (#) + (#) = 2222 -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1458: $PREPARSER ./calc /dev/null -input: -stderr: +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1457: cat stderr - | (#) + (#) = 2222 stderr: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -1.3: syntax error +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11: syntax error +1.1-16: error: 2222 != 1 Starting parse Entering state 0 Stack now 0 @@ -163927,42 +166636,124 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -stderr: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1449: cat stderr +./calc.at:1459: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1448: cat stderr ./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -1.3: syntax error -stderr: -input: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Reading a token +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Stack now 0 8 18 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 @@ -164099,28 +166890,21 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) - | 1 = 2 = 3 -./calc.at:1468: $PREPARSER ./calc input -stderr: -./calc.at:1477: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc 
calc.cc $LIBS - | (* *) + (*) + (*) -./calc.at:1457: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token end of input (1.1: ) -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Stack now 0 -./calc.at:1458: cat stderr -./calc.at:1449: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1453: "$PERL" -pi -e 'use strict; +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164131,32 +166915,15 @@ }eg ' expout || exit 77 stderr: -stderr: -./calc.at:1443: cat stderr -syntax error -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: cat stderr -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: cat stderr +./calc.at:1459: $PREPARSER ./calc input input: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +stderr: input: - | (1 + #) = 1111 -./calc.at:1455: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | (* *) + (*) + (*) ./calc.at:1449: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1448: "$PERL" -pi -e 'use strict; + | (#) + (#) = 2222 +./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164166,10 +166933,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1458: $PREPARSER ./calc input -input: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: $PREPARSER ./calc input stderr: -./calc.at:1451: "$PERL" -pi -e 'use strict; +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164179,9 +166946,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + 1) / (1 - 1) -./calc.at:1443: $PREPARSER ./calc input -./calc.at:1446: "$PERL" -pi -e 'use strict; +1.2: syntax error +1.10: syntax error +1.16: syntax error +stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1451: cat stderr +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1446: cat stderr +./calc.at:1458: cat stderr +./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1455: cat stderr +stderr: +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error +input: + | (- *) + (1 2) = 1 +./calc.at:1451: $PREPARSER ./calc input +551. calc.at:1478: testing Calculator glr2.cc %locations api.location.type={Span} ... 
+1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +input: +input: +stderr: +input: + | error + | 1 2 +./calc.at:1458: $PREPARSER ./calc input +input: +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164191,179 +166990,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -stderr: -input: -stderr: -input: -./calc.at:1455: cat stderr -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - | - | +1 + | (# + 1) = 1111 ./calc.at:1459: $PREPARSER ./calc input +./calc.at:1478: mv calc.y.tmp calc.y + +./calc.at:1446: $PREPARSER ./calc input +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -1.6: syntax error: invalid character: '#' | (1 + #) = 1111 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 
4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1453: $PREPARSER ./calc input -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1443: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1455: $PREPARSER ./calc input +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1446: cat stderr -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: stderr: Starting parse @@ -164375,122 +167018,77 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) +Entering state 11 +Stack now 0 4 11 Reading a token Next token is token number (1.6: 1) -Shifting token number (1.6: 1) 
-Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) +Entering state 11 +Stack now 0 4 11 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Stack now 0 4 11 25 +Reducing stack by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Stack now 0 8 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 -Stack now 0 8 22 4 12 19 1 +Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.15-2.0: ) +Reducing stack by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack 
by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -164507,22 +167105,8 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + #) = 1111 -stderr: -./calc.at:1455: $PREPARSER ./calc input -./calc.at:1448: cat stderr -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -1.6: syntax error: invalid character: '#' -./calc.at:1451: cat stderr -stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; +./calc.at:1449: cat stderr +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -164532,6 +167116,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -164637,52 +167225,10 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1458: cat stderr -2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1457: cat stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -input: -input: -./calc.at:1443: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 - | error -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1446: $PREPARSER ./calc input -input: - | (!!) 
+ (1 2) = 1 -./calc.at:1458: $PREPARSER ./calc input +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -164788,343 +167334,12 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.1: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -./calc.at:1453: cat stderr -./calc.at:1443: cat stderr +1.3: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: -./calc.at:1459: cat stderr - | (1 + #) = 1111 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' 
(1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +input: stderr: -./calc.at:1451: $PREPARSER ./calc input -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -./calc.at:1468: cat stderr stderr: -./calc.at:1459: $PREPARSER ./calc /dev/null -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1: syntax error -./calc.at:1449: cat stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -165135,22 +167350,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -532. 
calc.at:1443: ok -input: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ - | (# + 1) = 1111 -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1455: cat stderr -./calc.at:1453: $PREPARSER ./calc input -input: -stderr: -./calc.at:1457: $PREPARSER ./calc input -stderr: -stderr: Starting parse Entering state 0 Stack now 0 @@ -165160,298 +167359,77 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.1-2: ) Entering state 11 Stack now 0 4 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) -Entering state 1 -Stack now 0 8 20 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Stack now 0 8 20 4 12 20 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Stack now 0 8 20 4 12 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.7-15: 3) -Stack now 0 8 20 4 -Shifting token error (1.7-18: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23: ) +Next token is token invalid token (1.2: ) +Error: discarding token invalid token (1.2: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-2: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Error: popping token error (1.23: ) -Stack now 0 8 20 4 -Shifting token error (1.23-25: ) +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Error: popping token error (1.1-2: ) +Stack now 0 4 +Shifting token error (1.1-4: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Error: popping token error (1.23-25: ) -Stack now 0 8 20 4 -Shifting token error (1.23-27: ) +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Error: popping token error (1.1-4: ) +Stack now 0 4 +Shifting token error (1.1-6: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = token '(' (1.1: ) + $2 = token error (1.1-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Stack now 0 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering 
state 12 -Stack now 0 8 20 4 12 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Stack now 0 8 20 4 12 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Stack now 0 8 20 4 12 21 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Stack now 0 8 20 4 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Stack now 0 8 20 4 12 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Stack now 0 8 20 4 12 -Error: popping nterm exp (1.33-37: 2) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Error: popping token error (1.33-41: ) -Stack now 0 8 20 4 -Shifting token error (1.33-41: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Stack now 0 8 18 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -165468,14 +167446,11 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -1.6: syntax 
error: invalid character: '#' - | - | +1 -./calc.at:1468: $PREPARSER ./calc input -./calc.at:1448: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !+ ++ +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1449: $PREPARSER ./calc input +stdout: +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165485,16 +167460,21 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1468: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1455: cat stderr +./calc.at:1459: cat stderr stderr: -input: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' - | (# + 1) = 1111 -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: $PREPARSER ./calc input -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (# + 1) = 1111 +./calc.at:1448: cat stderr ./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -165505,32 +167485,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1455: $PREPARSER ./calc input +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1458: cat stderr +input: +./calc.at:1446: cat stderr stderr: +./calc.at:1451: cat stderr + | 1//2 +./calc.at:1459: $PREPARSER ./calc input +input: +input: +input: + | (# + 1) = 1111 +./calc.at:1455: $PREPARSER ./calc input +./calc.at:1449: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1468: $PREPARSER ./calc input + | (1 + #) = 1111 +./calc.at:1448: $PREPARSER ./calc input stderr: +input: +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) stderr: -syntax error -./calc.at:1451: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: cat stderr -1.2: syntax error: invalid character: '#' -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1457: $EGREP -c -v 'Return for a new token:|LAC:' stderr Starting parse Entering state 0 Stack now 0 @@ -165639,35 +167630,24 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1448: cat stderr -stderr: - -./calc.at:1459: cat stderr -./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -syntax error -input: -stderr: - | (- *) + (1 2) = 1 +./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1458: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -1.2: syntax error: invalid character: '#' -./calc.at:1446: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ + | (1 + # + 1) = 1111 +./calc.at:1449: $PREPARSER ./calc input input: +./calc.at:1446: $PREPARSER ./calc input +stderr: + | (* *) + (*) + (*) +./calc.at:1451: $PREPARSER ./calc input +stderr: stderr: -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1457: $PREPARSER ./calc input Starting parse Entering state 0 Stack now 0 @@ -165776,11 +167756,19 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: stderr: -input: -./calc.at:1453: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +stderr: +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +stderr: +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.6: syntax error: invalid character: '#' +1.2: syntax error +1.10: syntax error +1.16: syntax error +1.3: syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 @@ -165790,121 +167778,96 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 
0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -165921,18 +167884,12 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1459: $PREPARSER ./calc input -stderr: +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 - | 1 = 2 = 3 -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: input: -./calc.at:1455: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165942,16 +167899,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1453: $PREPARSER ./calc input -input: -stderr: -stderr: - | (# + 1) = 1111 -./calc.at:1451: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1449: "$PERL" -pi -e 'use strict; +./calc.at:1459: cat stderr +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -165961,14 +167910,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | 1 2 +./calc.at:1468: $PREPARSER ./calc input +1.2: syntax error +1.10: syntax error +1.16: syntax error stderr: stderr: -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -./calc.at:1455: cat stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +stderr: Starting parse Entering state 0 Stack now 0 @@ -165978,121 +167928,96 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Stack now 0 4 5 +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Stack now 0 4 5 15 -Reducing stack by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Stack now 0 4 12 +Error: popping nterm exp (1.2: 1) Stack now 0 4 -Shifting token error (1.2-3: ) +Shifting token error (1.2-6: ) +Entering state 11 +Stack now 0 4 11 +Next token is token invalid token (1.6: ) +Error: discarding token invalid token (1.6: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-6: ) Entering state 11 Stack now 0 4 11 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.9: 1) -Shifting token number (1.9: 1) -Entering state 1 -Stack now 0 8 20 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Error: popping token error (1.2-6: ) +Stack now 0 4 +Shifting token error (1.2-8: ) Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.11: 2) -Error: discarding token number (1.11: 2) -Error: popping token error (1.9-11: ) -Stack now 0 8 20 4 -Shifting token error (1.9-11: ) +Stack now 0 4 11 +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Error: popping token error (1.2-8: ) +Stack now 0 4 +Shifting token error (1.2-10: ) Entering state 11 -Stack now 0 8 20 4 11 +Stack now 0 4 11 Reading a token -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 -Stack now 0 8 20 4 11 25 +Stack now 0 4 11 25 Reducing stack by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Stack now 0 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Stack now 0 8 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Stack now 0 8 18 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Stack now 
0 8 18 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Stack now 0 7 @@ -166109,11 +168034,15 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -1.6: syntax error: invalid character: '#' -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: "$PERL" -pi -e 'use strict; +input: + | error +./calc.at:1478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1459: $PREPARSER ./calc input +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: cat stderr +stderr: +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -166123,16 +168052,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.7: syntax error -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: -stderr: -1.6: syntax error: invalid character: '#' -input: -1.2: syntax error: invalid character: '#' +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -166143,26 +168064,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1457: cat stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -stderr: -stderr: - | (1 + # + 1) = 1111 -./calc.at:1449: cat stderr -./calc.at:1455: $PREPARSER ./calc input -1.7: syntax error -./calc.at:1468: cat stderr -1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.1-46: error: 4444 != 1 -input: -input: - | (1 + # + 1) = 1111 -./calc.at:1449: $PREPARSER ./calc input +./calc.at:1458: cat stderr +syntax error +./calc.at:1448: cat stderr ./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -166173,34 +168077,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1458: cat stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr +input: ./calc.at:1446: cat stderr - | (#) + (#) = 2222 -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1468: $PREPARSER ./calc /dev/null -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: + | (1 + # + 1) = 1111 +stderr: +./calc.at:1449: cat stderr + | + | +1 +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1455: $PREPARSER ./calc input +input: stderr: +1.1: syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +input: +./calc.at:1451: cat stderr stderr: -./calc.at:1459: cat stderr Starting parse Entering state 0 Stack now 0 @@ -166328,49 +168221,175 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -1.6: syntax error: invalid character: '#' -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1453: cat stderr -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -550. calc.at:1478: testing Calculator C++ %glr-parser %locations api.location.type={Span} ... - | (* *) + (*) + (*) -./calc.at:1458: $PREPARSER ./calc input -input: -stderr: -./calc.at:1478: mv calc.y.tmp calc.y - -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1451: cat stderr -stderr: -stderr: -input: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - | (!!) + (1 2) = 1 -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -syntax error: invalid character: '#' -syntax error: invalid character: '#' +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | (1 + 1) / (1 - 1) -./calc.at:1453: $PREPARSER ./calc input -stderr: -syntax error + | (# + 1) = 1111 +./calc.at:1446: $PREPARSER ./calc input +./calc.at:1448: $PREPARSER ./calc input ./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 +./calc.at:1459: cat stderr +stderr: stderr: +1.2: syntax error: invalid character: '#' +stderr: + | (#) + (#) = 2222 +./calc.at:1449: $PREPARSER ./calc input +Starting parse +Entering state 0 +Stack now 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Stack now 0 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Stack now 0 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Stack now 0 4 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Stack now 0 4 12 20 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Stack now 0 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Stack now 0 8 22 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Stack now 0 8 22 4 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Stack now 0 8 22 4 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Stack now 0 8 22 4 12 19 1 +Reducing stack by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 
+Stack now 0 8 22 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Stack now 0 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Stack now 0 8 24 +Reducing stack by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Stack now 0 7 +Reducing stack by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Stack now 0 6 +Reading a token +Next token is token end of input (2.1: ) +Shifting token end of input (2.1: ) +Entering state 16 +Stack now 0 6 16 +Stack now 0 6 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Stack now 0 @@ -166498,32 +168517,16 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1448: cat stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: stderr: -1.11-17: error: null divisor -./calc.at:1453: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (- *) + (1 2) = 1 -./calc.at:1446: $PREPARSER ./calc input -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - | (1 + # + 1) = 1111 -./calc.at:1451: $PREPARSER ./calc input stderr: -1.11-17: error: null divisor -./calc.at:1468: "$PERL" -pi -e 'use strict; +input: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -166533,14 +168536,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: -stderr: -1.6: syntax error: invalid character: '#' ./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-16: error: 2222 != 1 -input: +stderr: ./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -166551,10 +168554,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1449: cat stderr - | - | +1 -./calc.at:1448: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1468: cat stderr +stderr: +./calc.at:1458: cat stderr Starting parse Entering state 0 Stack now 0 @@ -166564,293 
+168568,113 @@ Entering state 4 Stack now 0 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 +Stack now 0 4 12 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Stack now 0 4 12 20 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 4 12 20 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token 
'\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1459: cat stderr -./calc.at:1458: cat stderr -./calc.at:1457: cat stderr -2.1: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1453: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1455: cat stderr -./calc.at:1468: cat stderr -stderr: -2.1: syntax error -1.6: syntax error: invalid character: '#' -stderr: -input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Stack now 0 4 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Stack now 0 4 2 9 -Reducing stack by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2-4: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Stack now 0 4 12 20 29 Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): +Next token is token ')' (1.7: ) +Reducing stack by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Stack now 0 4 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Stack now 0 4 12 26 +Reducing stack by rule 13 (line 104): $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Stack now 0 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Stack now 0 8 22 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 -Stack now 0 8 20 4 +Stack now 0 8 22 4 Reading a token -Next token is token number (1.10: 1) -Shifting token number (1.10: 1) +Next token is token number (1.12: 1) +Shifting 
token number (1.12: 1) Entering state 1 -Stack now 0 8 20 4 1 +Stack now 0 8 22 4 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 -Stack now 0 8 20 4 12 -Reading a token -Next token is token number (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token number (1.12: 2) -Error: discarding token number (1.12: 2) -Error: popping token error (1.10-12: ) -Stack now 0 8 20 4 -Shifting token error (1.10-12: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Stack now 0 8 20 29 +Stack now 0 8 22 4 12 Reading a token -Next token is token '=' (1.15: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Stack now 0 8 18 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Stack now 0 8 22 4 12 19 Reading a token -Next token is token number (1.17: 1) -Shifting token number (1.17: 1) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 -Stack now 0 8 18 1 +Stack now 0 8 22 4 12 19 1 Reducing stack by rule 5 (line 79): - $1 = token number (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Stack now 0 8 18 27 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Stack now 0 8 22 4 12 19 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Stack now 0 8 22 4 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Stack now 0 8 22 4 12 26 +Reducing stack by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Stack now 0 8 22 31 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) +Reducing stack by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 Stack now 0 8 Next token is token '\n' (1.18-2.0: ) @@ -166858,7 +168682,7 @@ Entering state 24 Stack now 0 8 24 Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) + $1 = nterm exp (1.1-17: 2) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -166876,49 +168700,10 @@ Stack now 0 6 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -input: - | (1 + 1) / (1 - 1) -./calc.at:1449: $PREPARSER ./calc input -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1468: $PREPARSER ./calc input - 
| (- *) + (1 2) = 1 -./calc.at:1459: $PREPARSER ./calc input -input: - | (1 + #) = 1111 -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1453: cat stderr -stderr: -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - | 1 + 2 * 3 + !+ ++ -stderr: -./calc.at:1458: $PREPARSER ./calc input +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) stderr: -1.11-17: error: null divisor -syntax error: invalid character: '#' -./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1451: "$PERL" -pi -e 'use strict; +./calc.at:1455: cat stderr +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -166928,10 +168713,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -538. calc.at:1453: ok +./calc.at:1451: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1458: $PREPARSER ./calc /dev/null +./calc.at:1448: cat stderr ./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: ./calc.at:1446: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -166942,26 +168728,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +input: +input: | (1 + 1) / (1 - 1) ./calc.at:1455: $PREPARSER ./calc input -stderr: -1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -1.1-17: error: 2222 != 1 -syntax error: invalid character: '#' -1.11-17: error: null divisor -./calc.at:1448: cat stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 + | 1//2 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1446: cat stderr +input: +./calc.at:1468: $PREPARSER ./calc input stderr: +stderr: +input: Starting parse Entering state 0 Stack now 0 @@ -167113,26 +168891,22 @@ Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1455: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1449: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1451: cat stderr -./calc.at:1448: $PREPARSER ./calc /dev/null +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) + | 1 + 2 * 3 + !- ++ stderr: stderr: +./calc.at:1449: cat stderr +./calc.at:1451: $PREPARSER ./calc input stderr: - -./calc.at:1459: cat stderr -./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr -1.1: syntax error -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +syntax error + | (1 + # + 1) = 1111 +stderr: +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1448: $PREPARSER ./calc input +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +534. calc.at:1446: ok Starting parse Entering state 0 Stack now 0 @@ -167283,8 +169057,13 @@ Stack now 0 6 16 Cleanup: popping token end of file (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stderr: input: -./calc.at:1468: "$PERL" -pi -e 'use strict; + | (1 + #) = 1111 +./calc.at:1449: $PREPARSER ./calc input +syntax error +./calc.at:1455: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167294,27 +169073,29 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (* *) + (*) + (*) -./calc.at:1446: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: cat stderr stderr: -./calc.at:1457: cat stderr -input: -./calc.at:1449: cat stderr -1.1: syntax error -input: +./calc.at:1458: cat stderr +stderr: +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1455: cat stderr + +1.6: syntax error: invalid character: '#' stderr: -./calc.at:1478: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS - | (* *) + (*) + (*) - | (1 + 1) / (1 - 1) -./calc.at:1451: $PREPARSER ./calc input input: input: -536. calc.at:1449: ok +540. calc.at:1455: ok +1.6: syntax error: invalid character: '#' + | + | +1 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1459: $PREPARSER ./calc input -./calc.at:1468: cat stderr - | 1 + 2 * 3 + !- ++ ./calc.at:1458: $PREPARSER ./calc input -./calc.at:1455: "$PERL" -pi -e 'use strict; +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167324,157 +169105,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: ./calc.at:1448: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -167485,204 +169127,172 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: +./calc.at:1449: cat stderr stderr: +./calc.at:1451: cat stderr +./calc.at:1468: cat stderr +./calc.at:1448: cat stderr +2.1: syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +input: +input: | (# + 1) = 1111 +./calc.at:1449: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1451: $PREPARSER ./calc input +input: stderr: -1.11-17: error: null divisor -./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: $PREPARSER ./calc input -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: cat stderr input: -./calc.at:1455: cat stderr + | (1 + 1) / (1 - 1) stderr: -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) - | (!!) + (1 2) = 1 +./calc.at:1459: cat stderr +./calc.at:1448: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +552. calc.at:1479: testing Calculator C++ %glr-parser %header parse.error=verbose %name-prefix "calc" %verbose ... +./calc.at:1479: mv calc.y.tmp calc.y + + | error ./calc.at:1468: $PREPARSER ./calc input -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1459: $PREPARSER ./calc /dev/null +1.11-17: error: null divisor +stderr: +./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +input: +1.2: syntax error: invalid character: '#' +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' stderr: stderr: syntax error -error: 2222 != 1 -540. calc.at:1455: ok -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Error: popping token error (1.2: ) -Stack now 0 4 -Shifting token error (1.2-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Error: popping token error (1.10: ) -Stack now 0 8 20 4 -Shifting token error (1.10: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Error: popping token error (1.16: ) -Stack now 0 8 20 4 -Shifting token error (1.16: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) + | (!!) + (1 2) = 1 +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: cat stderr -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: $PREPARSER ./calc input +stderr: stderr: +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +1.11-17: error: null divisor +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +553. calc.at:1479: testing Calculator glr2.cc %header parse.error=verbose %name-prefix "calc" %verbose ... stderr: +./calc.at:1479: mv calc.y.tmp calc.y +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 stderr: +./calc.at:1448: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +./calc.at:1449: cat stderr +./calc.at:1451: cat stderr stderr: -syntax error -error: 2222 != 1 +./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y input: -1.11-17: error: null divisor +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 + | (1 + # + 1) = 1111 +./calc.at:1449: $PREPARSER ./calc input +./calc.at:1448: cat stderr +./calc.at:1459: cat stderr +input: +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -syntax error: invalid character: '#' -1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +535. calc.at:1448: | (1 + #) = 1111 + ok +1.6: syntax error: invalid character: '#' +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: cat stderr +input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1448: $PREPARSER ./calc input +./calc.at:1459: $PREPARSER ./calc input stderr: -./calc.at:1458: cat stderr - -./calc.at:1446: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.1-46: error: 4444 != 1 +1.6: syntax error: invalid character: '#' +1.6: syntax error: invalid character: '#' +./calc.at:1468: cat stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167692,31 +169302,78 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -551. calc.at:1478: testing Calculator glr2.cc %locations api.location.type={Span} ... -./calc.at:1478: mv calc.y.tmp calc.y +stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error + | (- *) + (1 2) = 1 +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1449: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.18: syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +1.23: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.41: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) 1.1-46: error: 4444 != 1 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (#) + (#) = 2222 -./calc.at:1458: $PREPARSER ./calc input -./calc.at:1446: cat stderr +stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 + | 1 = 2 = 3 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1459: cat stderr +./calc.at:1479: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +stderr: +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: cat stderr +stderr: +./calc.at:1449: cat stderr +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +input: +stderr: +input: + | (# + 1) = 1111 +input: +554. calc.at:1480: testing Calculator C++ %glr-parser parse.error=verbose api.prefix={calc} %verbose ... +./calc.at:1480: mv calc.y.tmp calc.y + +./calc.at:1451: $PREPARSER ./calc input +syntax error +./calc.at:1458: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | (!!) 
+ (1 2) = 1 +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1449: $PREPARSER ./calc input +stderr: +stderr: +stderr: +1.2: syntax error: invalid character: '#' +input: +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +1.11-17: error: null divisor +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1449: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1458: $PREPARSER ./calc input +stderr: +./calc.at:1479: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS stderr: stderr: 1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1478: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -167727,6 +169384,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.11-17: error: null divisor +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.11: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-16: error: 2222 != 1 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1451: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -167737,15 +169402,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1457: cat stderr stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1448: "$PERL" -pi -e 'use strict; +./calc.at:1449: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -167755,227 +169413,95 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !+ ++ -./calc.at:1446: $PREPARSER ./calc input -stderr: -input: -./calc.at:1458: cat stderr -stderr: - | (1 + # + 1) = 1111 -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1457: $PREPARSER ./calc input +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +./calc.at:1459: cat stderr ./calc.at:1468: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 
0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1448: cat stderr +./calc.at:1449: cat stderr ./calc.at:1451: cat stderr -stderr: -./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1458: cat stderr input: +./calc.at:1480: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS | (- *) + (1 2) = 1 -stderr: +./calc.at:1459: $PREPARSER ./calc input +536. calc.at:1449: ok +input: + | + | +1 ./calc.at:1468: $PREPARSER ./calc input -552. calc.at:1479: testing Calculator C++ %glr-parser %header parse.error=verbose %name-prefix "calc" %verbose ... -./calc.at:1479: mv calc.y.tmp calc.y - +stderr: input: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 input: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1448: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' - | (1 + #) = 1111 -syntax error + | 1 + 2 * 3 + !+ ++ + | (1 + # + 1) = 1111 syntax error -error: 2222 != 1 -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: $PREPARSER ./calc input ./calc.at:1458: $PREPARSER ./calc input ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -537. calc.at:1451: ok +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +stdout: stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + stderr: -syntax error -syntax error -error: 2222 != 1 - | 1 + 2 * 3 + !- ++ -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Stack now 0 8 20 5 14 -Reducing stack by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1446: $EGREP -c -v 'Return for a new token:|LAC:' stderr + stderr: +1.4: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.12: syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +1.1-17: error: 2222 != 1 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error stderr: 1.6: syntax error: invalid character: '#' +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -syntax error: invalid character: '#' -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1459: cat stderr stderr: +./calc.at:1458: $EGREP -c -v 'Return for a new token:|LAC:' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: ./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -167986,12 +169512,54 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -553. calc.at:1479: testing Calculator glr2.cc %header parse.error=verbose %name-prefix "calc" %verbose ... -./calc.at:1479: mv calc.y.tmp calc.y - +stderr: + | (* *) + (*) + (*) +input: +./calc.at:1459: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +input: +stderr: +./calc.at:1458: $PREPARSER ./calc input + | 1 2 +./calc.at:1451: cat stderr +./calc.at:1476: $PREPARSER ./calc input +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +555. calc.at:1480: testing Calculator glr2.cc parse.error=verbose api.prefix={calc} %verbose ... 
+./calc.at:1480: mv calc.y.tmp calc.y +./calc.at:1468: cat stderr stderr: -./calc.at:1448: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1468: $PREPARSER ./calc /dev/null +./calc.at:1451: $PREPARSER ./calc input +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.2: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.10: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +1.16: syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +stderr: +stderr: +syntax error +1.11-17: error: null divisor +./calc.at:1451: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +stderr: +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1459: cat stderr +syntax error +1.11-17: error: null divisor +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168001,231 +169569,98 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1458: cat stderr input: -./calc.at:1479: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -1.6: syntax error: invalid character: '#' - | 1 + 2 * 3 + !- ++ -./calc.at:1459: cat stderr -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1468: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1451: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: stderr: -./calc.at:1457: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) | (#) + (#) = 2222 -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1458: cat stderr -./calc.at:1448: cat stderr +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1476: cat stderr stderr: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' +./calc.at:1451: cat stderr input: -input: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) - | (1 + 1) / (1 - 1) -./calc.at:1468: $PREPARSER ./calc input -input: +./calc.at:1468: cat stderr +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1457: $PREPARSER ./calc input -input: +./calc.at:1459: $EGREP -c -v 'Return for a new token:|LAC:' stderr + | 1//2 +./calc.at:1476: $PREPARSER ./calc input +537. 
calc.at:1451: ok stderr: - | (# + 1) = 1111 -./calc.at:1458: $PREPARSER ./calc input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Stack now 0 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Stack now 0 8 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Stack now 0 8 20 29 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Stack now 0 8 20 29 21 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Stack now 0 8 20 29 21 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Stack now 0 8 20 29 -Next token is token '+' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Stack now 0 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Stack now 0 8 20 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Stack now 0 8 20 5 13 -Reducing stack by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Stack now 0 8 20 -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (- *) + (1 2) = 1 +./calc.at:1480: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS 1.2: syntax error: invalid character: '#' 1.8: syntax error: invalid character: '#' -./calc.at:1448: $PREPARSER ./calc input -stderr: stderr: +input: +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1468: $PREPARSER ./calc input +input: stderr: stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 + | 1 + 2 * 3 + !- ++ +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1458: cat stderr syntax error syntax error syntax error -error: null divisor -./calc.at:1457: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error: invalid character: '#' +syntax error +syntax error +error: 4444 != 1 ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -554. calc.at:1480: testing Calculator C++ %glr-parser parse.error=verbose api.prefix={calc} %verbose ... 
-./calc.at:1480: mv calc.y.tmp calc.y -./calc.at:1459: cat stderr +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 + | (1 + #) = 1111 +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +1.6: syntax error: invalid character: '#' ./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -error: null divisor -./calc.at:1446: "$PERL" -pi -e 'use strict; +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168235,17 +169670,67 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1476: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1459: cat stderr +input: + | error +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1468: cat stderr +input: + | (#) + (#) = 2222 +stderr: syntax error +./calc.at:1458: cat stderr +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +556. calc.at:1482: testing Calculator C++ %glr-parser %debug ... +stderr: +./calc.at:1482: mv calc.y.tmp calc.y + syntax error + | (# + 1) = 1111 +input: +./calc.at:1458: $PREPARSER ./calc input + | (!!) + (1 2) = 1 +./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: syntax error -./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +error: 2222 != 1 +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: -./calc.at:1479: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS 1.2: syntax error: invalid character: '#' +stderr: +syntax error +error: 2222 != 1 +./calc.at:1459: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1458: cat stderr +input: | (1 + #) = 1111 -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1448: "$PERL" -pi -e 'use strict; +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168255,12 +169740,63 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1446: cat stderr +./calc.at:1459: $PREPARSER ./calc input +./calc.at:1476: cat stderr +input: stderr: + | (1 + # + 1) = 1111 1.6: syntax error: invalid character: '#' +./calc.at:1458: $PREPARSER ./calc input +./calc.at:1468: cat stderr ./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input: +1.6: syntax error: invalid character: '#' + | 1 = 2 = 3 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +stderr: + | (- *) + (1 2) = 1 +1.6: syntax error: invalid character: '#' +./calc.at:1468: $PREPARSER ./calc input +stderr: +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.6: syntax error: invalid character: '#' +syntax error +syntax error +error: 2222 != 1 +./calc.at:1459: cat stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +stderr: +syntax error +syntax error +error: 2222 != 1 ./calc.at:1458: cat stderr +input: + | (# + 1) = 1111 +./calc.at:1459: $PREPARSER ./calc input +input: +stderr: + | (1 + 1) / (1 - 1) +./calc.at:1458: $PREPARSER ./calc input +1.2: syntax error: invalid character: '#' +./calc.at:1482: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -168271,305 +169807,129 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1446: $PREPARSER ./calc input -./calc.at:1457: cat stderr stderr: -./calc.at:1448: cat stderr -1.6: syntax error: invalid character: '#' +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token 1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' 
(1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -541. calc.at:1457: ok +./calc.at:1476: cat stderr +stderr: +1.11-17: error: null divisor +./calc.at:1468: cat stderr input: + | + | +1 ./calc.at:1459: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1458: $PREPARSER ./calc input -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1458: cat stderr stderr: input: +syntax error +input: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (* *) + (*) + (*) -./calc.at:1448: $PREPARSER ./calc input - | (# + 1) = 1111 +./calc.at:1468: $PREPARSER ./calc input + | (1 + # + 1) = 1111 ./calc.at:1459: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' +542. calc.at:1458: ok +stderr: +syntax error +syntax error +syntax error +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: +1.6: syntax error: invalid character: '#' +syntax error stderr: -1.2: syntax error: invalid character: '#' +./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error +stderr: + +1.6: syntax error: invalid character: '#' +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1459: cat stderr +./calc.at:1476: cat stderr +input: ./calc.at:1468: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1476: $PREPARSER ./calc /dev/null +./calc.at:1459: $PREPARSER ./calc input +stderr: +stderr: +557. calc.at:1482: testing Calculator glr2.cc %debug ... 
+1.11-17: error: null divisor +./calc.at:1482: mv calc.y.tmp calc.y + +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y ./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error input: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Stack now 0 8 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Stack now 0 8 20 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Next token is token invalid token (1.8: ) -Error: discarding token invalid token (1.8: ) -Error: popping token error (1.1-8: ) -Stack now 0 8 20 4 -Shifting token error (1.1-8: ) -Entering state 11 -Stack now 0 8 20 4 11 -Reading a token -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Stack now 0 8 20 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.1-8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Stack now 0 8 20 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack 
now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +stderr: | 1 + 2 * 3 + !+ ++ -./calc.at:1468: $PREPARSER ./calc input stderr: -1.2: syntax error: invalid character: '#' +./calc.at:1468: $PREPARSER ./calc input +1.11-17: error: null divisor +syntax error stderr: ./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: ./calc.at:1459: cat stderr stderr: +543. calc.at:1459: ok input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1 + 2 * 3 + !- ++ ./calc.at:1468: $PREPARSER ./calc input -1.2: syntax error -1.10: syntax error -1.16: syntax error stderr: +./calc.at:1476: cat stderr +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1476: $PREPARSER ./calc input stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1446: "$PERL" -pi -e 'use strict; +./calc.at:1482: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168579,11 +169939,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1459: $PREPARSER ./calc input -./calc.at:1448: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +558. calc.at:1485: testing Calculator C++ %glr-parser parse.error=detailed %debug %name-prefix "calc" %verbose ... +./calc.at:1485: mv calc.y.tmp calc.y + +./calc.at:1468: cat stderr +./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168593,20 +169959,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -./calc.at:1480: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1446: cat stderr -./calc.at:1458: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1448: cat stderr + | (#) + (#) = 2222 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1476: cat stderr +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (1 + 1) / (1 - 1) -./calc.at:1458: $PREPARSER ./calc input - | (1 + #) = 1111 stderr: + | (!!) 
+ (1 2) = 1 +./calc.at:1476: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +syntax error +error: 2222 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error +error: 2222 != 1 ./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -168617,238 +169990,102 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -555. calc.at:1480: testing Calculator glr2.cc parse.error=verbose api.prefix={calc} %verbose ... -1.6: syntax error: invalid character: '#' +./calc.at:1468: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: -./calc.at:1446: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ + | (1 + #) = 1111 +./calc.at:1468: $PREPARSER ./calc input +./calc.at:1485: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: -./calc.at:1448: $PREPARSER ./calc input -1.11-17: error: null divisor +syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr stderr: +syntax error: invalid character: '#' +input: + | (- *) + (1 2) = 1 +./calc.at:1476: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' 
(1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: mv calc.y.tmp calc.y - -./calc.at:1480: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +syntax error +syntax error +error: 2222 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1468: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1459: cat stderr -./calc.at:1448: $EGREP -c -v 'Return for a new token:|LAC:' stderr -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +error: 2222 != 1 ./calc.at:1468: cat stderr +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (# + 1) = 1111 +./calc.at:1468: $PREPARSER ./calc input stderr: stderr: -1.11-17: error: null divisor -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: +syntax error: invalid character: '#' +stdout: +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +stderr: +syntax error: invalid character: '#' +./calc.at:1476: cat stderr input: input: - | 1 + 2 * 3 + !- ++ -./calc.at:1448: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1459: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1468: $PREPARSER ./calc input + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1477: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1476: $PREPARSER ./calc input stderr: -./calc.at:1446: "$PERL" -pi -e 'use strict; +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168859,21 +170096,21 @@ }eg ' expout || exit 77 stderr: -1.11-17: error: null divisor -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1458: cat stderr -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1459: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: cat stderr -542. calc.at:1458: ok +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: -1.11-17: error: null divisor -./calc.at:1448: "$PERL" -pi -e 'use strict; +syntax error +syntax error +syntax error +./calc.at:1468: cat stderr +input: + | 1 2 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -168883,208 +170120,37 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +input: +1.3: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1468: $PREPARSER ./calc input +stderr: +stderr: syntax error: invalid character: '#' +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +1.3: syntax error +stderr: syntax error: invalid character: '#' +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | (# + 1) = 1111 -./calc.at:1446: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1476: $PREPARSER ./calc input stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1459: cat stderr -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1480: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.2: ) -Error: discarding token invalid token (1.2: ) -Error: popping 
token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-2: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Error: popping token error (1.1-2: ) -Stack now 0 4 -Shifting token error (1.1-4: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Error: popping token error (1.1-4: ) -Stack now 0 4 -Shifting token error (1.1-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.1-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1477: cat stderr ./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -169095,10 +170161,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1448: cat stderr - -543. calc.at:1459: ok -./calc.at:1446: "$PERL" -pi -e 'use strict; +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1476: $PREPARSER ./calc input +stderr: +input: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1468: cat stderr +stderr: +stderr: +1.3: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1468: $PREPARSER ./calc input +stderr: +stderr: +1.3: syntax error +error: null divisor +./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169110,7 +170194,18 @@ ' expout || exit 77 stderr: stdout: -./calc.at:1446: cat stderr +stderr: +error: null divisor +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1476: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -169122,19 +170217,7 @@ || /\t/ )' calc.cc -./calc.at:1468: cat stderr -input: - | (#) + (#) = 2222 -./calc.at:1448: $PREPARSER ./calc input -input: -stderr: - -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' input: -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1446: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -169149,135 +170232,10 @@ | 2^2^3 = 256 | (2^2)^3 = 64 ./calc.at:1476: $PREPARSER ./calc input -input: -stderr: - | (1 + #) = 1111 -./calc.at:1468: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -stderr: +./calc.at:1476: cat stderr +./calc.at:1477: cat stderr stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 
1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; +./calc.at:1468: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169288,139 +170246,65 @@ }eg ' expout || exit 77 stderr: -556. calc.at:1482: testing Calculator C++ %glr-parser %debug ... -./calc.at:1482: mv calc.y.tmp calc.y +input: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (#) + (#) = 2222 +stdout: +./calc.at:1476: $PREPARSER ./calc input + | error +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1469: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc stderr: stderr: syntax error: invalid character: '#' -./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1468: cat stderr input: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Stack now 0 4 12 -Error: popping nterm exp (1.2: 1) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Next token is token invalid token (1.6: ) -Error: discarding token invalid token (1.6: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-6: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Error: popping token error (1.2-6: ) -Stack now 0 4 -Shifting token error (1.2-8: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Error: popping token error (1.2-8: ) -Stack now 0 4 -Shifting token error (1.2-10: ) -Entering state 11 -Stack now 0 4 11 -Reading a token -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Stack now 0 4 11 
25 -Reducing stack by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Stack now 0 8 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Stack now 0 8 18 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Stack now 0 8 18 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) | 1 2 ./calc.at:1476: $PREPARSER ./calc input stderr: -557. calc.at:1482: testing Calculator glr2.cc %debug ... -./calc.at:1482: mv calc.y.tmp calc.y - + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +stderr: +./calc.at:1469: $PREPARSER ./calc input +544. 
calc.at:1468: ok +stderr: syntax error -./calc.at:1448: cat stderr +1.1: syntax error ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1446: "$PERL" -pi -e 'use strict; +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169430,9 +170314,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: -./calc.at:1468: "$PERL" -pi -e 'use strict; +syntax error + +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169442,160 +170327,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -./calc.at:1446: cat stderr -input: - | (1 + #) = 1111 -./calc.at:1448: $PREPARSER ./calc input input: -stderr: - | (1 + 1) / (1 - 1) -./calc.at:1446: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: cat stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) 
-Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) + | 1 2 +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1477: cat stderr stderr: ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -169607,170 +170342,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr input: -./calc.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (# + 1) = 1111 -1.6: syntax error: invalid character: '#' -./calc.at:1468: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1477: $PREPARSER ./calc input stderr: stderr: +input: +syntax error +1.7: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1476: $PREPARSER ./calc input ./calc.at:1476: cat stderr -Starting parse -Entering state 0 -Stack now 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Stack now 0 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Stack now 0 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Stack now 0 4 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Stack now 0 4 12 20 -Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Stack now 0 4 12 20 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Stack now 0 4 12 20 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Stack now 0 4 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Stack now 0 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) 
-Entering state 8 -Stack now 0 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Stack now 0 8 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Stack now 0 8 22 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Stack now 0 8 22 4 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Stack now 0 8 22 4 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Stack now 0 8 22 4 12 19 -Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) -Entering state 1 -Stack now 0 8 22 4 12 19 1 -Reducing stack by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Stack now 0 8 22 4 12 19 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Stack now 0 8 22 4 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Stack now 0 8 22 4 12 26 -Reducing stack by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Stack now 0 8 22 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Stack now 0 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Stack now 0 8 24 -Reducing stack by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Stack now 0 7 -Reducing stack by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Stack now 0 6 -Reading a token -Next token is token end of input (2.1: ) -Shifting token end of input (2.1: ) -Entering state 16 -Stack now 0 6 16 -Stack now 0 6 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error: invalid character: '#' -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: +1.7: syntax error stderr: syntax error: invalid character: '#' -./calc.at:1446: "$PERL" -pi -e 'use strict; +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169781,16 +170374,10 @@ }eg ' expout || exit 77 input: +syntax error: invalid character: '#' | 1//2 -./calc.at:1482: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS ./calc.at:1476: $PREPARSER ./calc input -./calc.at:1446: cat stderr -stderr: -./calc.at:1448: cat stderr -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -534. 
calc.at:1446: ok -./calc.at:1468: "$PERL" -pi -e 'use strict; +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169800,17 +170387,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -input: - | (# + 1) = 1111 +559. calc.at:1485: testing Calculator glr2.cc parse.error=detailed %debug %name-prefix "calc" %verbose ... +./calc.at:1485: mv calc.y.tmp calc.y -./calc.at:1448: $PREPARSER ./calc input -./calc.at:1482: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1468: cat stderr stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -169821,16 +170404,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1469: cat stderr input: stderr: - | (1 + # + 1) = 1111 -./calc.at:1468: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' + | 1//2 +syntax error +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1477: cat stderr stderr: -syntax error: invalid character: '#' -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error ./calc.at:1476: cat stderr -./calc.at:1448: "$PERL" -pi -e 'use strict; +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +syntax error +input: + | + | +1 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169840,18 +170432,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (# + 1) = 1111 +./calc.at:1476: $PREPARSER ./calc input +stderr: +2.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error: invalid character: '#' -input: - | error -./calc.at:1476: $PREPARSER ./calc input -558. calc.at:1485: testing Calculator C++ %glr-parser parse.error=detailed %debug %name-prefix "calc" %verbose ... -./calc.at:1485: mv calc.y.tmp calc.y - -./calc.at:1448: cat stderr +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -./calc.at:1468: "$PERL" -pi -e 'use strict; +stderr: +2.1: syntax error +stdout: +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169861,14 +170454,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -syntax error -stdout: -./calc.at:1468: cat stderr -./calc.at:1469: "$PERL" -ne ' +./calc.at:1476: cat stderr +./calc.at:1478: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. 
@@ -169879,17 +170466,9 @@ || /\t/ )' calc.cc -input: - | (1 + # + 1) = 1111 -./calc.at:1448: $PREPARSER ./calc input stderr: -input: -input: - | (1 + 1) / (1 - 1) -./calc.at:1468: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169899,9 +170478,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -error: null divisor -./calc.at:1468: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1485: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | error +./calc.at:1476: $PREPARSER ./calc input | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -169915,16 +170496,69 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 +./calc.at:1469: cat stderr +./calc.at:1478: $PREPARSER ./calc input +stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1477: cat stderr +stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +syntax error + | error ./calc.at:1469: $PREPARSER ./calc input +./calc.at:1477: $PREPARSER ./calc /dev/null stderr: stderr: -error: null divisor stderr: +syntax error +./calc.at:1476: cat stderr ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +1.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: $PREPARSER ./calc input +input: +stderr: +stderr: +stderr: +1.1: syntax error + | (1 + # + 1) = 1111 +syntax error +./calc.at:1476: $PREPARSER ./calc input +stderr: +1.3: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1476: cat stderr -1.6: syntax error: invalid character: '#' stderr: -./calc.at:1448: "$PERL" -pi -e 'use strict; +stderr: +1.3: syntax error +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169934,11 +170568,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error: invalid character: '#' input: -input: - | 1 2 -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1468: "$PERL" -pi -e 'use strict; +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169951,25 +170593,49 @@ | 1 = 2 = 3 ./calc.at:1476: $PREPARSER ./calc input stderr: -stderr: -./calc.at:1485: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1448: cat stderr +./calc.at:1477: cat stderr syntax error ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1468: cat stderr stderr: +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1477: $PREPARSER ./calc input syntax error +./calc.at:1469: cat stderr +./calc.at:1478: cat stderr +input: +stderr: + | 1 = 2 = 3 +./calc.at:1469: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error input: - | (1 + 1) / (1 - 1) -./calc.at:1448: $PREPARSER ./calc input -544. calc.at:1468: ok stderr: -1.11-17: error: null divisor -./calc.at:1448: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr +syntax error + | 1//2 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -169981,7 +170647,13 @@ }eg ' expout || exit 77 stderr: -./calc.at:1469: "$PERL" -pi -e 'use strict; +input: +1.3: syntax error + | (1 + 1) / (1 - 1) +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: $PREPARSER ./calc input +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -169991,11 +170663,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.11-17: error: null divisor -./calc.at:1469: cat stderr +syntax error +stderr: +stderr: +1.3: syntax error +error: null divisor ./calc.at:1476: cat stderr -input: -./calc.at:1448: "$PERL" -pi -e 'use strict; +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170005,26 +170681,52 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1//2 -./calc.at:1469: $PREPARSER ./calc input stderr: -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | | +1 +error: null divisor ./calc.at:1476: $PREPARSER ./calc input +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token 
\[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +input: syntax error -./calc.at:1448: cat stderr -stderr: -syntax error + | (!!) + (1 2) = 1 ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: $PREPARSER ./calc input +stderr: stderr: -535. calc.at:1448: ok +./calc.at:1478: cat stderr syntax error -./calc.at:1469: "$PERL" -pi -e 'use strict; +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: cat stderr +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +input: +input: +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170034,6 +170736,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | error +./calc.at:1478: $PREPARSER ./calc input + | + | +1 +./calc.at:1469: $PREPARSER ./calc input ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170044,25 +170751,76 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -559. calc.at:1485: testing Calculator glr2.cc parse.error=detailed %debug %name-prefix "calc" %verbose ... -./calc.at:1485: mv calc.y.tmp calc.y - ./calc.at:1476: cat stderr -./calc.at:1476: $PREPARSER ./calc /dev/null stderr: -./calc.at:1485: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +stderr: +1.1: syntax error syntax error - -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: cat stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +546. calc.at:1476: ok +./calc.at:1477: cat stderr +./calc.at:1476: cat stderr stderr: +stderr: +1.1: syntax error +syntax error +./calc.at:1476: $PREPARSER ./calc /dev/null input: + | (- *) + (1 2) = 1 +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: $PREPARSER ./calc input +stderr: syntax error - | error -./calc.at:1469: $PREPARSER ./calc input +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1478: cat stderr syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1469: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1469: $PREPARSER ./calc /dev/null +stderr: ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170074,12 +170832,36 @@ }eg ' expout || exit 77 stderr: +1.7: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.7: syntax error +stderr: +./calc.at:1477: cat stderr syntax error +./calc.at:1476: cat stderr 560. calc.at:1486: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" %verbose ... +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1486: mv calc.y.tmp calc.y +input: ./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1476: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1478: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170090,17 +170872,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +stderr: +1.2: syntax error +1.10: syntax error +1.16: syntax error ./calc.at:1476: $PREPARSER ./calc input +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: syntax error syntax error syntax error syntax error error: 4444 != 1 +input: ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1478: $PREPARSER ./calc input +stderr: +stderr: +2.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error +1.2: syntax error +1.10: syntax error +1.16: syntax error ./calc.at:1469: cat stderr stderr: syntax error @@ -170108,12 +170905,30 @@ syntax error syntax error error: 4444 != 1 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: - | 1 = 2 = 3 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1469: $PREPARSER ./calc input stderr: -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170124,15 +170939,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: syntax error +syntax error +syntax error +syntax error +error: 4444 != 1 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: cat stderr stderr: -./calc.at:1476: cat stderr +./calc.at:1478: cat stderr stdout: -input: - | (!!) + (1 2) = 1 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -ne ' +stderr: +./calc.at:1480: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -170143,25 +170961,22 @@ || /\t/ )' calc.cc -./calc.at:1486: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1478: $PREPARSER ./calc /dev/null +input: syntax error -error: 2222 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +syntax error +syntax error +error: 4444 != 1 + | 1 + 2 * 3 + !+ ++ +./calc.at:1477: $PREPARSER ./calc input +stderr: +./calc.at:1476: cat stderr stderr: +1.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -syntax error -error: 2222 != 1 | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -170175,16 +170990,12 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1469: cat stderr stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc input input: - | - | +1 -./calc.at:1469: $PREPARSER ./calc input stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170194,29 +171005,94 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) + (1 2) = 1 input: - | 1 2 +./calc.at:1476: $PREPARSER ./calc input +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error + | 1 + 2 * 3 + !- ++ ./calc.at:1477: $PREPARSER ./calc input +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1486: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS stderr: -./calc.at:1476: cat stderr syntax error +error: 2222 != 1 stderr: -1.3: syntax error +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (- *) + (1 2) = 1 stderr: -./calc.at:1476: $PREPARSER ./calc input -1.3: syntax error +./calc.at:1478: cat stderr + | 1 2 +./calc.at:1480: $PREPARSER ./calc input +syntax error +error: 2222 != 1 +stderr: +./calc.at:1469: cat stderr +stderr: +syntax error, unexpected number +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1478: $PREPARSER ./calc input +input: +stderr: + | (!!) + (1 2) = 1 +syntax error, unexpected number +stderr: +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error +error: 2222 != 1 +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1477: cat stderr syntax error error: 2222 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; +./calc.at:1476: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170226,7 +171102,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1480: cat stderr + | (- *) + (1 2) = 1 +./calc.at:1476: $PREPARSER ./calc input + | (#) + (#) = 2222 +stderr: +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170236,13 +171130,47 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error +syntax error +error: 2222 != 1 +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: +./calc.at:1478: cat stderr syntax error syntax error error: 2222 != 1 + | 1//2 +stderr: +./calc.at:1480: $PREPARSER ./calc input ./calc.at:1469: cat stderr -./calc.at:1477: cat stderr -./calc.at:1469: $PREPARSER ./calc /dev/null +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +stderr: +input: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' + | (!!) + (1 2) = 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: $PREPARSER ./calc input +input: + | (- *) + (1 2) = 1 +./calc.at:1469: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170254,22 +171182,51 @@ }eg ' expout || exit 77 stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error +syntax error +error: 2222 != 1 ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1477: cat stderr +stderr: stderr: - | 1//2 -./calc.at:1477: $PREPARSER ./calc input ./calc.at:1476: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 syntax error -stderr: -1.3: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +error: 2222 != 1 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: input: | (* *) + (*) + (*) ./calc.at:1476: $PREPARSER ./calc input +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr + | (1 + #) = 1111 +./calc.at:1477: $PREPARSER ./calc input stderr: -1.3: syntax error stderr: ./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -170285,7 +171242,24 @@ syntax error syntax error ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | error stderr: +./calc.at:1478: cat stderr +./calc.at:1480: $PREPARSER ./calc input +syntax error +syntax error +syntax error +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1469: cat stderr +syntax error, unexpected invalid token +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: ./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170296,15 +171270,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -syntax error -syntax error -./calc.at:1469: cat stderr -input: -./calc.at:1477: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (* *) + (*) + (*) + | (- *) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input ./calc.at:1469: $PREPARSER ./calc input stderr: +stderr: +syntax error, unexpected invalid token +stderr: ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170318,37 +171291,49 @@ syntax error syntax error syntax error -syntax error -error: 4444 != 1 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | error -./calc.at:1477: $PREPARSER ./calc input +./calc.at:1477: cat stderr stderr: syntax error syntax error syntax error -syntax error -error: 4444 != 1 -stdout: stderr: +input: ./calc.at:1476: cat stderr -./calc.at:1478: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -1.1: syntax error +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 + | (# + 1) = 1111 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.2: syntax error: invalid character: '#' ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -input: +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170359,28 +171344,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1478: $PREPARSER ./calc input +./calc.at:1480: cat stderr +1.2: syntax error: invalid character: '#' | 1 + 2 * 3 + !+ ++ ./calc.at:1476: $PREPARSER ./calc input -1.1: syntax error -stderr: stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 = 2 = 3 +./calc.at:1480: $PREPARSER ./calc input ./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170391,42 +171362,67 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +syntax error, unexpected '=' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1469: cat stderr stderr: stderr: input: +syntax error, unexpected '=' ./calc.at:1477: cat stderr - | 1 2 + | (* *) + (*) + (*) input: ./calc.at:1478: $PREPARSER ./calc input -stderr: input: | 1 + 2 * 3 + !- ++ ./calc.at:1476: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ ./calc.at:1469: $PREPARSER ./calc input stderr: +stderr: input: stderr: + | (1 + # + 1) = 1111 ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 = 2 = 3 ./calc.at:1477: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 -stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.3: syntax error -1.7: syntax error stderr: +stderr: +./calc.at:1480: cat stderr +1.6: syntax error: invalid character: '#' ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -error: 2222 != 1 stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; +stderr: +1.6: syntax error: invalid character: '#' +1.2: syntax error +1.10: syntax error +1.16: syntax error +input: +input: + | + | +1 +./calc.at:1480: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1469: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170436,7 +171432,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.7: syntax error +stderr: ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170447,7 +171443,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; +syntax error, unexpected '+' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170457,8 +171455,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '+' +stderr: +./calc.at:1477: cat stderr +./calc.at:1476: cat stderr ./calc.at:1478: cat stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; +input: +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170468,26 +171474,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1//2 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1477: cat stderr -./calc.at:1476: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1477: $PREPARSER ./calc input stderr: -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: input: -stderr: -1.3: syntax error -./calc.at:1469: cat stderr - | - | +1 -./calc.at:1477: $PREPARSER ./calc input +1.11-17: error: null divisor +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (#) + (#) = 2222 ./calc.at:1476: $PREPARSER ./calc input -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; +input: +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170497,28 +171493,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -2.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1478: $PREPARSER ./calc input +stderr: +./calc.at:1480: cat stderr stderr: -input: syntax error: invalid character: '#' syntax error: invalid character: '#' ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (- *) + (1 2) = 1 -./calc.at:1469: $PREPARSER ./calc input -stderr: +1.11-17: error: null divisor +./calc.at:1469: cat stderr +./calc.at:1480: 
$PREPARSER ./calc /dev/null +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -2.1: syntax error -syntax error -syntax error -error: 2222 != 1 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1478: cat stderr -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: stderr: +syntax error, unexpected end of input ./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170529,14 +171519,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error -syntax error -error: 2222 != 1 - | error +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +stderr: +stderr: +syntax error, unexpected end of input +input: + | (#) + (#) = 2222 +./calc.at:1469: $PREPARSER ./calc input +stderr: +input: +./calc.at:1477: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ ./calc.at:1478: $PREPARSER ./calc input stderr: -1.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170547,9 +171548,22 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +548. calc.at:1477: ok +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr stderr: -1.1: syntax error ./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170560,25 +171574,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1477: $PREPARSER ./calc /dev/null -stdout: -stderr: ./calc.at:1476: cat stderr -./calc.at:1469: cat stderr -1.1: syntax error -./calc.at:1480: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc +input: +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170589,41 +171589,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: +./calc.at:1480: $PREPARSER ./calc input | (1 + #) = 1111 stderr: ./calc.at:1476: $PREPARSER ./calc input -1.1: syntax error - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1480: $PREPARSER ./calc input -input: -stderr: +./calc.at:1469: cat stderr stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr syntax error: invalid character: '#' - | (* *) + (*) + (*) ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: $PREPARSER ./calc input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1478: cat stderr +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +input: stderr: -syntax error -syntax error -syntax error + | (1 + #) = 1111 +syntax error: invalid character: '#' +./calc.at:1469: $PREPARSER ./calc input +input: +stderr: +syntax error: invalid character: '#' + | (#) + (#) = 2222 ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1478: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170634,30 +171636,39 @@ }eg ' expout || exit 77 stderr: -input: -stderr: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: syntax error: invalid character: '#' - | 1 = 2 = 3 -./calc.at:1478: $PREPARSER ./calc input -syntax error -syntax error -syntax error +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1480: cat stderr +./calc.at:1476: cat stderr input: - | 1 2 -./calc.at:1477: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | (!!) + (1 2) = 1 ./calc.at:1480: $PREPARSER ./calc input -stderr: -1.7: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected number -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +561. calc.at:1486: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" %verbose ... 
+./calc.at:1486: mv calc.y.tmp calc.y + input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -1.7: syntax error -./calc.at:1477: $PREPARSER ./calc input stderr: ./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -170669,7 +171680,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | (# + 1) = 1111 +./calc.at:1476: $PREPARSER ./calc input +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +stderr: +stderr: +./calc.at:1469: cat stderr +syntax error, unexpected number +error: 2222 != 1 +syntax error: invalid character: '#' +input: + | (1 + #) = 1111 +./calc.at:1478: $PREPARSER ./calc input +stderr: +input: + | (# + 1) = 1111 +./calc.at:1469: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170679,22 +171716,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +syntax error: invalid character: '#' +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected number -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1469: cat stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170704,7 +171729,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1480: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +./calc.at:1480: cat stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170714,8 +171743,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: + | (- *) + (1 2) = 1 +./calc.at:1480: $PREPARSER ./calc input +stderr: ./calc.at:1476: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170725,51 +171762,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1480: cat stderr -input: -input: ./calc.at:1478: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1469: $PREPARSER ./calc input - | (# + 1) = 1111 +stderr: +input: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected number +error: 2222 != 1 + | (1 + # + 1) = 1111 input: ./calc.at:1476: $PREPARSER ./calc input +./calc.at:1486: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | (# + 1) = 1111 +./calc.at:1478: $PREPARSER ./calc input stderr: -./calc.at:1477: cat stderr -stderr: - | 1//2 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1469: cat stderr syntax error: invalid character: '#' stderr: ./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '/', expecting number or '-' or '(' or '!' - | - | +1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +1.2: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1478: $PREPARSER ./calc input input: -stderr: syntax error: invalid character: '#' -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -stderr: -2.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1477: $PREPARSER ./calc input -input: -stderr: - | 1 + 2 * 3 + !- ++ + | (1 + # + 1) = 1111 ./calc.at:1469: $PREPARSER ./calc input -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -2.1: syntax error -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170781,10 +171798,13 @@ ' expout || exit 77 stderr: stderr: +1.2: syntax error: invalid character: '#' +syntax error: invalid character: '#' ./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1480: cat stderr +stderr: +syntax error: invalid character: '#' +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170794,7 +171814,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1476: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170804,8 +171824,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1476: cat stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; +input: + | (* *) + (*) + (*) +./calc.at:1480: $PREPARSER ./calc input +stderr: +./calc.at:1478: cat stderr +./calc.at:1469: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170815,22 +171839,35 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1480: cat stderr -./calc.at:1478: cat stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1476: cat stderr input: +stderr: | (1 + # + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc /dev/null -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1477: cat stderr +./calc.at:1478: $PREPARSER ./calc input +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1469: cat stderr stderr: -1.1: syntax error +input: +1.6: syntax error: invalid character: '#' ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1476: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1469: "$PERL" -pi -e 'use strict; +stderr: +error: null divisor +./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + 1) / (1 - 1) +./calc.at:1469: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' +stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170840,32 +171877,66 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error stderr: +error: null divisor +./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: null divisor +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr +stderr: +error: null divisor +input: +./calc.at:1476: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: cat stderr + | 1 + 2 * 3 + !+ ++ ./calc.at:1480: $PREPARSER ./calc input -1.1: syntax error +stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: stderr: - | (- *) + (1 2) = 1 + | (1 + 1) / (1 - 1) +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1469: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1476: cat stderr +stderr: +1.11-17: error: null divisor +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1480: $PREPARSER ./calc input +547. calc.at:1476: ok stderr: stderr: -./calc.at:1477: $PREPARSER ./calc input -syntax error: invalid character: '#' -syntax error, unexpected invalid token -stdout: ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor ./calc.at:1469: cat stderr -./calc.at:1476: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - stderr: ./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -170877,36 +171948,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected invalid token -input: -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1476: $PREPARSER ./calc input -stderr: - | (#) + (#) = 2222 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1469: $PREPARSER ./calc input + +545. calc.at:1469: ok ./calc.at:1478: cat stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170916,13 +171961,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + +./calc.at:1480: cat stderr +550. calc.at:1478: ok +input: + | (#) + (#) = 2222 +./calc.at:1480: $PREPARSER ./calc input stderr: + +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +562. calc.at:1487: testing Calculator glr2.cc parse.error=custom %debug %name-prefix "calc" %verbose ... +./calc.at:1487: mv calc.y.tmp calc.y + syntax error: invalid character: '#' syntax error: invalid character: '#' -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1487: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170932,11 +171989,23 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1480: cat stderr +563. calc.at:1489: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... +input: +./calc.at:1489: mv calc.y.tmp calc.y + +./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | (1 + #) = 1111 +./calc.at:1480: $PREPARSER ./calc input stderr: syntax error: invalid character: '#' -syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +564. calc.at:1489: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... 
+./calc.at:1489: mv calc.y.tmp calc.y + stderr: -input: +./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +syntax error: invalid character: '#' ./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -170947,26 +172016,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1476: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1478: $PREPARSER ./calc input ./calc.at:1480: cat stderr input: - | 1 2 -./calc.at:1476: $PREPARSER ./calc input +./calc.at:1489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | (# + 1) = 1111 +./calc.at:1480: $PREPARSER ./calc input stderr: +syntax error: invalid character: '#' +./calc.at:1487: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -input: -./calc.at:1477: cat stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +stderr: +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -170976,36 +172038,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + 1) / (1 - 1) -input: -stderr: -./calc.at:1476: $PREPARSER ./calc input -stderr: -input: -syntax error -stderr: - | 1 = 2 = 3 -stderr: -error: null divisor -./calc.at:1480: $PREPARSER ./calc input -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -stderr: - | (* *) + (*) + (*) -./calc.at:1477: $PREPARSER ./calc input -syntax error, unexpected '=' -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: "$PERL" -ne ' +./calc.at:1477: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. 
@@ -171016,10 +172050,43 @@ || /\t/ )' calc.cc -./calc.at:1469: cat stderr +./calc.at:1480: cat stderr +input: +./calc.at:1489: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1477: $PREPARSER ./calc input +input: + | (1 + # + 1) = 1111 stderr: +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; +syntax error: invalid character: '#' +stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | 1 2 +./calc.at:1477: $PREPARSER ./calc input +syntax error: invalid character: '#' +stderr: +1.3: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.3: syntax error +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171029,16 +172096,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error, unexpected '=' -1.2: syntax error -1.10: syntax error -1.16: syntax error -error: null divisor -input: - | (1 + #) = 1111 -./calc.at:1469: $PREPARSER ./calc input -input: +./calc.at:1480: cat stderr ./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -171049,10 +172107,32 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: cat stderr +input: + | (1 + 1) / (1 - 1) +./calc.at:1480: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +error: null divisor +./calc.at:1477: cat stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +stderr: +error: null divisor +input: + | 1//2 +./calc.at:1477: $PREPARSER ./calc input +input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -171066,23 +172146,12 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -syntax error: invalid character: '#' -./calc.at:1477: cat stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1478: $PREPARSER ./calc input +./calc.at:1479: $PREPARSER ./calc input +1.3: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: ./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -171093,7 +172162,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +./calc.at:1482: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. 
== 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1480: cat stderr +1.3: syntax error +stderr: +554. calc.at:1480: ok +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -171103,11 +172190,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: input: +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 + | 1 2 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1479: $PREPARSER ./calc input +stderr: +syntax error, unexpected number +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./calc.at:1477: cat stderr +stderr: stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 Starting parse Entering state 0 Reading a token @@ -171944,16 +173051,22 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () - | 1 + 2 * 3 + !+ ++ -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stderr: -./calc.at:1476: cat stderr -1.11: syntax error -1.1-16: error: 2222 != 1 -stderr: +./calc.at:1478: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +input: +syntax error, unexpected number stderr: Starting parse Entering state 0 @@ -172042,115 +173155,1678 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (5) +Shifting token "number" (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 
27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 +Reading a token +Next token 
is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token 
'\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 28 +Reading a token +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token "number" (4) +Shifting token "number" (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) +-> $$ = nterm exp (4) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting 
token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (256) +Shifting token "number" (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) +-> $$ = nterm exp (256) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (2) 
+Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (64) +Shifting token "number" (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) +-> $$ = nterm exp (64) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + | error +./calc.at:1477: $PREPARSER ./calc input +stderr: +input: +input: +1.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +1.1: syntax error +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +./calc.at:1479: cat stderr +input: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 2 +./calc.at:1478: $PREPARSER ./calc input +input: +stderr: + | 1//2 +./calc.at:1479: $PREPARSER ./calc input +1.3: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: cat stderr +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.3: syntax error +input: +stderr: +565. calc.at:1491: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1491: mv calc.y.tmp calc.y + +syntax error, unexpected '/', expecting number or '-' or '(' or '!' + | 1 = 2 = 3 +./calc.at:1477: $PREPARSER ./calc input +stderr: +./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: cat stderr +1.7: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1//2 +./calc.at:1482: $PREPARSER ./calc input +stderr: +stderr: +1.7: syntax error +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1478: cat stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +input: + | 1//2 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | error +stderr: +./calc.at:1479: $PREPARSER ./calc input +stderr: +1.3: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected invalid token +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.3: syntax error +stderr: +syntax error, unexpected invalid token +./calc.at:1477: cat stderr +input: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | + | +1 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1482: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +2.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1479: cat stderr + | error +./calc.at:1482: $PREPARSER ./calc input +stderr: +2.1: syntax error +./calc.at:1478: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1491: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +input: + | 1 = 2 = 3 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | error +./calc.at:1478: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +stderr: +syntax error, unexpected '=' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.1: syntax error +./calc.at:1477: cat stderr +syntax error, unexpected '=' +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: $PREPARSER ./calc /dev/null +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.1: syntax error +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1482: cat stderr +1.1: syntax error +./calc.at:1478: cat stderr +./calc.at:1479: cat stderr +input: +input: + | 1 = 2 = 3 +./calc.at:1482: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () + | + | +1 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.7: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: cat stderr +stderr: +stderr: +syntax error, unexpected '+' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +stderr: +input: +1.7: syntax error +stderr: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +syntax error, unexpected '+' +stderr: +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1482: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: cat stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1479: cat stderr + | + | +1 +input: +./calc.at:1482: $PREPARSER ./calc input + | + | +1 +./calc.at:1479: $PREPARSER ./calc /dev/null +./calc.at:1478: $PREPARSER ./calc input +stderr: +stderr: +./calc.at:1477: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +2.1: syntax error +syntax error, unexpected end of input +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +input: + | (!!) + (1 2) = 1 +stderr: +./calc.at:1477: $PREPARSER ./calc input +stderr: +syntax error, unexpected end of input +stderr: +2.1: syntax error +1.11: syntax error +1.1-16: error: 2222 != 1 +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1480: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +1.11: syntax error +1.1-16: error: 2222 != 1 +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +stderr: +./calc.at:1482: cat stderr +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +./calc.at:1482: $PREPARSER ./calc /dev/null +input: +stderr: +./calc.at:1477: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1479: $PREPARSER ./calc input +stderr: +./calc.at:1478: $PREPARSER ./calc /dev/null +input: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 2 +stderr: +./calc.at:1480: $PREPARSER ./calc input +stderr: +input: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +stderr: +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.1: syntax error + | (- *) + (1 2) = 1 +syntax error, unexpected number +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +stderr: +stderr: +stderr: +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +1.1: syntax error +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +syntax error, unexpected number +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +./calc.at:1482: cat stderr +./calc.at:1480: cat stderr +./calc.at:1478: cat stderr +input: +input: +input: +input: + | (!!) + (1 2) = 1 +./calc.at:1479: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1477: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | 1//2 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1478: $PREPARSER ./calc input +stderr: +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 10 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Next token is token '=' () +Reading a token +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 
Reading a token -Next token is token "number" (5) -Shifting token "number" (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) --> $$ = nterm exp (5) -Entering state 10 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -172158,11 +174834,11 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 +Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token Next token is token "number" (2) Shifting token "number" (2) @@ -172170,114 +174846,46 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1) - $2 = token '^' () + $2 = token '*' () $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 -Reading 
a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '*' () +syntax error +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () @@ -172293,49 +174901,76 @@ Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) + $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) --> $$ = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Now at end of 
input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +stderr: +syntax error, unexpected number +error: 2222 != 1 + | (* *) + (*) + (*) +./calc.at:1477: $PREPARSER ./calc input +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stderr: +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Starting parse +Entering state 0 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token ')' () +syntax error +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -172343,33 +174978,11 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -172377,47 +174990,18 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Entering state 29 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> 
$$ = nterm input () -Entering state 6 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -172425,289 +175009,101 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Entering state 29 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) -> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token "number" (4) -Shifting token "number" (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) --> $$ = nterm exp (4) -Entering state 10 +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token ')' () +syntax error +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' 
() +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 12 +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token 
'^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (256) -Shifting token "number" (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) --> $$ = nterm exp (256) -Entering state 27 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token Next token is token "number" (2) Shifting token "number" (2) @@ -172715,74 +175111,77 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (2) -> $$ = nterm exp (2) -Entering state 32 +Entering state 30 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () $3 = nterm exp (2) --> $$ = nterm exp (4) +-> $$ = nterm exp (2) Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (4) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 32 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm 
exp (64) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) --> $$ = nterm exp (64) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token @@ -172791,107 +175190,21 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1469: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr -stderr: -input: -input: - | 1 2 -input: - | - | +1 -./calc.at:1482: $PREPARSER ./calc input -input: -./calc.at:1480: $PREPARSER ./calc input - | 1//2 - | 1 + 2 * 3 + !- ++ -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1476: $PREPARSER ./calc input -stderr: -546. 
calc.at:1476: ok -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: cat stderr -stderr: -stderr: -./calc.at:1478: cat stderr -syntax error, unexpected '+' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error +1.2: syntax error +1.10: syntax error +1.16: syntax error ./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -syntax error -syntax error, unexpected '+' -input: -input: - | (# + 1) = 1111 - | (- *) + (1 2) = 1 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1469: $PREPARSER ./calc input stderr: stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; +1.2: syntax error +1.18: syntax error +1.23: syntax error +1.41: syntax error +1.1-46: error: 4444 != 1 +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -172901,8 +175214,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -172912,11 +175224,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -172926,7 +175234,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1479: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -172936,8 +175245,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -172948,45 +175256,24 @@ }eg ' expout || exit 77 ./calc.at:1480: cat stderr -./calc.at:1477: cat stderr -./calc.at:1476: cat stderr -./calc.at:1480: $PREPARSER ./calc /dev/null -./calc.at:1482: cat stderr input: -stderr: -./calc.at:1469: "$PERL" -pi -e 'use strict; - 
s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr - | (#) + (#) = 2222 -./calc.at:1477: $PREPARSER ./calc input -syntax error, unexpected end of input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -561. calc.at:1486: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" %verbose ... -./calc.at:1486: mv calc.y.tmp calc.y - + | (- *) + (1 2) = 1 +./calc.at:1482: cat stderr +./calc.at:1479: $PREPARSER ./calc input input: stderr: -./calc.at:1486: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y stderr: +./calc.at:1478: cat stderr | error -stderr: -./calc.at:1476: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -input: - | 1//2 -input: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1477: cat stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc input stdout: -stderr: +input: + | (!!) + (1 2) = 1 ./calc.at:1485: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" @@ -172998,53 +175285,250 @@ || /\t/ )' calc.cc - | (* *) + (*) + (*) ./calc.at:1482: $PREPARSER ./calc input -syntax error -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1469: cat stderr -syntax error, unexpected end of input stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected invalid token +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Entering state 12 Reading a token -Next token is token '/' () +Next token is token "number" (2) syntax error -Error: popping token '/' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +input: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1477: $PREPARSER ./calc input input: stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: + | (!!) 
+ (1 2) = 1 +syntax error, unexpected invalid token +input: +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -173058,38 +175542,14 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -input: -stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 ./calc.at:1485: $PREPARSER ./calc input stderr: -./calc.at:1469: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -syntax error -./calc.at:1477: cat stderr +1.11: syntax error +1.1-16: error: 2222 != 1 stderr: +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -173099,13 +175559,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' -1.2: syntax error -1.10: syntax error -1.16: syntax error -stderr: -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: Starting parse Entering state 0 Reading a token @@ -173942,26 +176395,13 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -stderr: - | (1 + #) = 1111 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1480: cat stderr ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; +1.11: syntax error +1.1-16: error: 2222 != 1 +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -173972,7 +176412,8 @@ }eg ' expout || exit 77 stderr: -input: +./calc.at:1477: $PREPARSER ./calc input +./calc.at:1479: cat stderr Starting parse Entering state 0 Reading a token @@ -174809,11 +177250,8 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1480: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' input: +stderr: ./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -174824,12 +177262,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -./calc.at:1485: $PREPARSER ./calc input -stderr: -./calc.at:1476: cat stderr -./calc.at:1482: cat stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; +./calc.at:1480: cat stderr +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174839,13 +177274,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 + | 1 2 +./calc.at:1485: $PREPARSER ./calc input +input: stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (* *) + (*) + (*) Starting parse Entering state 0 Reading a token @@ -174861,28 +177295,17 @@ syntax error, unexpected number Error: popping nterm exp (1) Cleanup: discarding lookahead token number (2) +./calc.at:1479: $PREPARSER ./calc input ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: +stderr: +./calc.at:1478: cat stderr stderr: -./calc.at:1486: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -stderr: -input: -./calc.at:1478: cat stderr -input: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 Starting parse Entering state 0 Reading a token @@ -174898,29 +177321,10 @@ syntax error, unexpected number Error: popping nterm exp (1) Cleanup: discarding lookahead token number (2) - | error -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1469: cat stderr - | 1 = 2 = 3 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1477: cat stderr -stderr: -stderr: -input: -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1482: cat stderr stderr: -./calc.at:1469: $PREPARSER ./calc input -input: -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174930,12 +177334,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error input: - | (# + 1) = 1111 -./calc.at:1477: $PREPARSER ./calc input stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '=' + | (- *) + (1 2) = 1 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (- *) + (1 2) = 1 +./calc.at:1482: $PREPARSER ./calc input +stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174945,29 +177357,137 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected '=' +./calc.at:1477: cat stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token "invalid token" () +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () syntax error -Cleanup: discarding lookahead token "invalid token" () - | 1 + 2 * 3 + !+ ++ -./calc.at:1485: cat stderr -./calc.at:1478: $PREPARSER ./calc input -stderr: -stderr: -error: null divisor -./calc.at:1469: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr -stderr: +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) 
+Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: -input: - | 1//2 -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: cat stderr +1.4: syntax error +1.12: syntax error +1.1-17: error: 2222 != 1 +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174978,17 +177498,140 @@ }eg ' expout || exit 77 input: -./calc.at:1485: $PREPARSER ./calc input -error: null divisor -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1478: $PREPARSER ./calc input +input: stderr: -1.2: syntax error: invalid character: '#' +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + | (#) + (#) = 2222 +./calc.at:1477: $PREPARSER ./calc input + | 1//2 +./calc.at:1485: $PREPARSER ./calc input stderr: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -174998,6 +177641,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1479: cat stderr Starting parse Entering state 0 Reading a token @@ -175020,20 +177664,11 @@ Cleanup: discarding lookahead token '/' () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | (!!) + (1 2) = 1 +./calc.at:1480: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: cat stderr Starting parse Entering state 0 Reading a token @@ -175054,12 +177689,13 @@ Error: popping token '/' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '/' () +input: stderr: -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1482: cat stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1469: "$PERL" -pi -e 'use strict; +./calc.at:1478: cat stderr +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' + | 1 + 2 * 3 + !+ ++ +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175069,9 +177705,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1479: $PREPARSER ./calc input input: + | + | +1 +./calc.at:1480: $PREPARSER ./calc input stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175081,20 +177722,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr - | - | +1 -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1476: $PREPARSER ./calc input -input: -./calc.at:1469: cat stderr - | 1 = 2 = 3 -stderr: -./calc.at:1482: $PREPARSER ./calc input -syntax error +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1478: cat stderr +syntax error, unexpected '+' ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -175105,129 +177735,155 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1477: $PREPARSER ./calc input +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1482: cat stderr stderr: stderr: -545. 
calc.at:1469: ok stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error +1.10: syntax error +1.16: syntax error +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '+' +./calc.at:1485: cat stderr +input: +./calc.at:1477: cat stderr + | (* *) + (*) + (*) +stderr: +input: +./calc.at:1482: $PREPARSER ./calc input +1.2: syntax error +1.10: syntax error +1.16: syntax error + | 1 + 2 * 3 + !- ++ +./calc.at:1479: $PREPARSER ./calc input +input: + | error input: +./calc.at:1485: $PREPARSER ./calc input + | (1 + #) = 1111 +stderr: +stderr: +./calc.at:1477: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '=' () -syntax error -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1485: cat stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1478: $PREPARSER ./calc input +Next token is token '*' () syntax error -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1480: cat stderr -stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -stderr: -input: -Starting parse -Entering state 0 +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by 
rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '=' () +Next token is token '*' () syntax error -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | error -./calc.at:1485: $PREPARSER ./calc input -input: - - | (- *) + (1 2) = 1 -stderr: -./calc.at:1480: $PREPARSER ./calc input -stderr: +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -175235,9 +177891,7 @@ syntax error, unexpected invalid token Cleanup: discarding lookahead token invalid token () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175247,7 +177901,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1476: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175257,149 +177913,120 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: cat stderr -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr +1.6: syntax error: invalid character: '#' stderr: -./calc.at:1479: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: +./calc.at:1480: cat stderr Starting parse Entering state 0 Reading a token Next token is token invalid token () syntax error, unexpected invalid token Cleanup: discarding lookahead token invalid token () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 - | (1 + #) = 1111 -./calc.at:1478: $PREPARSER ./calc input -stderr: -input: -./calc.at:1482: cat stderr -1.6: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1476: cat stderr -stderr: - | (1 + 1) / (1 - 1) -1.6: syntax error: invalid character: '#' -./calc.at:1477: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: $PREPARSER ./calc /dev/null -562. calc.at:1487: testing Calculator glr2.cc parse.error=custom %debug %name-prefix "calc" %verbose ... -./calc.at:1487: mv calc.y.tmp calc.y - -./calc.at:1487: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: -stderr: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -input: -stderr: -1.11-17: error: null divisor -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | - | +1 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -1.11-17: error: null divisor -./calc.at:1485: cat stderr -input: Starting parse Entering state 0 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (* *) + (*) + (*) -stderr: -./calc.at:1480: $PREPARSER ./calc input -stderr: -./calc.at:1478: cat stderr -Starting parse -Entering state 0 +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) 
+Entering state 29 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () -> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -175407,15 +178034,30 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -input: -input: -syntax error - | 1 2 -./calc.at:1477: "$PERL" -pi -e 'use strict; +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1480: $PREPARSER ./calc /dev/null +stderr: +stderr: +stderr: +syntax error, unexpected end of input +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.6: syntax error: invalid character: '#' +./calc.at:1478: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175426,24 +178068,9 @@ }eg ' expout || exit 77 stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected end of input input: -./calc.at:1479: $PREPARSER ./calc input - | (# + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1485: $PREPARSER ./calc input -stderr: -syntax error, unexpected number -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175453,13 +178080,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1477: cat stderr -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -stderr: -1.2: syntax error: invalid character: '#' -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175469,11 +178090,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -548. 
calc.at:1477: ok -stderr: -syntax error, unexpected number -./calc.at:1476: cat stderr +./calc.at:1485: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1478: $PREPARSER ./calc input ./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -175484,7 +178103,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1477: cat stderr +stderr: +./calc.at:1482: cat stderr +./calc.at:1479: cat stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1480: cat stderr +input: + | 1 = 2 = 3 +./calc.at:1485: $PREPARSER ./calc input +input: +stderr: +input: +input: + | (#) + (#) = 2222 + | 1 + 2 * 3 + !+ ++ +./calc.at:1479: $PREPARSER ./calc input +stderr: + | (# + 1) = 1111 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1477: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -175514,20 +178152,89 @@ Error: popping token '=' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '=' () +stderr: +input: ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: input: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +syntax error: invalid character: '#' +syntax error: invalid character: '#' + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1482: cat stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1478: $PREPARSER ./calc input +stderr: +1.2: syntax error: invalid character: '#' +stderr: +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -175557,15 +178264,94 @@ Error: popping token '=' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '=' () +stderr: +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +error: 4444 != 1 +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +1.2: syntax error: invalid character: '#' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +stderr: +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+error: 4444 != 1 input: - | 1 + 2 * 3 + !+ ++ - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1482: $PREPARSER ./calc /dev/null -./calc.at:1487: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -./calc.at:1478: cat stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175575,10 +178361,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -stderr: -stderr: -./calc.at:1485: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !- ++ +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175588,53 +178372,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1479: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error -Cleanup: discarding lookahead token "end of input" () -./calc.at:1485: cat stderr -stderr: -stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -syntax error -syntax error -error: 4444 != 1 -stderr: -1.6: syntax error: invalid character: '#' -input: -input: -input: - | - | +1 -./calc.at:1485: $PREPARSER ./calc input - | 1//2 -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175644,13 +178393,76 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1485: cat stderr +./calc.at:1477: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
() + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1479: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -175660,6 +178472,91 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) + | (1 + # + 1) = 1111 +input: +./calc.at:1477: $PREPARSER ./calc input +input: + | + | +1 +./calc.at:1485: $PREPARSER ./calc input + | (1 + #) = 1111 +stderr: +./calc.at:1479: $PREPARSER ./calc input +input: +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +stderr: Starting parse Entering state 0 Reading a token @@ -175679,26 +178576,18 @@ syntax error, unexpected '+' Error: popping nterm input () Cleanup: discarding lookahead token '+' () + | (!!) + (1 2) = 1 +./calc.at:1480: $PREPARSER ./calc input ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected '/', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' ./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected number +error: 2222 != 1 ./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: cat stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -input: - | (1 + 1) / (1 - 1) -./calc.at:1478: $PREPARSER ./calc input stderr: +input: stderr: Starting parse Entering state 0 @@ -175719,30 +178608,50 @@ syntax error, unexpected '+' Error: popping nterm input () Cleanup: discarding lookahead token '+' () + | (#) + (#) = 2222 +./calc.at:1478: $PREPARSER ./calc input +1.6: syntax error: invalid character: '#' stderr: -563. calc.at:1489: testing Calculator C++ %glr-parser parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... -./calc.at:1489: mv calc.y.tmp calc.y - +syntax error: invalid character: '#' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -1.11-17: error: null divisor +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' ./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: cat stderr -stdout: -./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1486: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1476: cat stderr +./calc.at:1477: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +1.2: syntax error: invalid character: '#' +1.8: syntax error: invalid character: '#' +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -175753,26 +178662,60 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1477: cat stderr +./calc.at:1482: cat stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1480: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr input: -1.11-17: error: null divisor - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 + | (1 + 1) / (1 - 1) +input: +./calc.at:1485: $PREPARSER ./calc /dev/null +./calc.at:1479: cat stderr +input: +./calc.at:1477: $PREPARSER ./calc input + | (#) + (#) = 2222 ./calc.at:1482: $PREPARSER ./calc input -./calc.at:1480: cat stderr stderr: + | (- *) + (1 2) = 1 +./calc.at:1480: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.11-17: error: null divisor +./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1478: cat stderr +stderr: +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 Starting parse Entering state 0 Reading a token @@ -175780,10 +178723,15 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token ')' () -syntax error +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 @@ -175802,61 +178750,14 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -syntax error -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -175868,33 +178769,99 @@ -> $$ = nterm exp 
(1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2222) +Shifting token "number" (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | (# + 1) = 1111 +1.11-17: error: null divisor +./calc.at:1479: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +stderr: +stderr: +input: +stdout: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +stderr: + | (1 + #) = 1111 +./calc.at:1478: $PREPARSER ./calc input +syntax error: invalid character: '#' +stderr: +./calc.at:1486: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc + +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -175906,15 +178873,8 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) Entering state 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 @@ -175923,45 +178883,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error -Error: popping token '*' () -Error: popping nterm exp (2) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -175977,36 +178904,35 @@ Reading a token Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) + $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (4444) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -176020,8 +178946,7 @@ Entering state 16 
Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1477: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -176031,9 +178956,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1486: $PREPARSER ./calc input +stderr: input: -./calc.at:1479: "$PERL" -pi -e 'use strict; +stderr: +1.6: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -176043,19 +178972,56 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (!!) + (1 2) = 1 -stderr: -./calc.at:1476: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1480: $PREPARSER ./calc input -stderr: -syntax error -error: 2222 != 1 + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1486: $PREPARSER ./calc input stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: cat stderr +1.6: syntax error: invalid character: '#' +./calc.at:1477: cat stderr +./calc.at:1480: cat stderr +./calc.at:1485: cat stderr stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -176892,259 +179858,11 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 
(line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; +input: +549. calc.at:1477: ok +input: +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -177154,26 +179872,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./calc.at:1479: cat stderr -./calc.at:1477: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - + | (* *) + (*) + (*) +./calc.at:1480: $PREPARSER ./calc input + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1485: $PREPARSER ./calc input stderr: -syntax error -error: 2222 != 1 -550. calc.at:1478: ok stderr: +./calc.at:1479: cat stderr stderr: -./calc.at:1485: cat stderr +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -178010,437 +180720,6 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1485: $PREPARSER ./calc /dev/null -input: -input: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () - | error -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1486: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - -stderr: -syntax error, unexpected invalid token -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -stderr: -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1482: cat stderr -stderr: -stderr: -syntax error, unexpected invalid token -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -input: - | 1 2 -input: -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1480: cat stderr -./calc.at:1489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS - | (- *) + (1 2) = 1 -input: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: $PREPARSER ./calc input - | (!!) + (1 2) = 1 -./calc.at:1482: $PREPARSER ./calc input -stderr: -./calc.at:1485: cat stderr -stderr: -stderr: -1.3: syntax error -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is 
token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error -syntax error -error: 2222 != 1 -input: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -stderr: -stderr: -./calc.at:1480: $PREPARSER ./calc input -input: -./calc.at:1479: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error -syntax error -syntax error -error: 2222 != 1 -564. calc.at:1489: testing Calculator glr2.cc parse.error=verbose %debug %name-prefix "calc" api.token.prefix={TOK_} %verbose ... - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: cat stderr -./calc.at:1485: $PREPARSER ./calc input -stderr: -./calc.at:1489: mv calc.y.tmp calc.y - -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: Starting parse Entering state 0 Reading a token @@ -178688,53 +180967,15 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () - | 1 = 2 = 3 -./calc.at:1479: $PREPARSER ./calc input ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | 1//2 -./calc.at:1486: $PREPARSER ./calc input -stderr: -syntax error, unexpected '=' -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stderr: + | 1 2 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: cat stderr -syntax error: invalid character: '#' -stderr: +./calc.at:1486: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -178983,6 +181224,10 @@ Cleanup: popping token end of file () Cleanup: popping nterm input () ./calc.at:1482: cat stderr +input: +./calc.at:1478: cat stderr +stderr: + Starting parse Entering state 0 Reading a token @@ -178994,35 +181239,19 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () +Next token is token "number" (2) +syntax error, unexpected number Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -syntax error, unexpected '=' -input: - | 1//2 -input: -./calc.at:1477: $PREPARSER ./calc input +Cleanup: discarding lookahead token "number" (2) + | (1 + # + 1) = 1111 +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: $PREPARSER ./calc input input: - | (* *) + (*) + (*) -./calc.at:1476: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stderr: +syntax error: invalid character: '#' + | (# + 1) = 1111 +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -179033,10 +181262,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -1.3: syntax error -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -179047,59 +181272,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1480: cat stderr -stderr: -./calc.at:1486: cat stderr +input: stderr: stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +1.2: syntax error: invalid character: '#' + | (1 + #) = 1111 +./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: cat stderr +./calc.at:1485: cat stderr +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () @@ -179113,13 +181315,18 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token "number" (2) -syntax error +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -179131,40 +181338,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a 
token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -179178,42 +181377,17 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -syntax error -syntax error -syntax error -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: cat stderr -input: -./calc.at:1479: cat stderr input: -1.3: syntax error - | (# + 1) = 1111 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +1.2: syntax error: invalid character: '#' + | 1 + 2 * 3 + !+ ++ ./calc.at:1480: $PREPARSER ./calc input -stderr: - | error -stderr: -./calc.at:1486: $PREPARSER ./calc input -stderr: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -syntax error -syntax error -syntax error input: stderr: | (!!) 
+ (1 2) = 1 +./calc.at:1485: $PREPARSER ./calc input +stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -179221,42 +181395,6 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 @@ -179265,13 +181403,18 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token "number" (2) -syntax error +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -179283,40 +181426,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -179330,25 +181465,28 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: $PREPARSER ./calc input - | - | +1 -stderr: -./calc.at:1479: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -stderr: +./calc.at:1486: "$PERL" -pi -e 'use 
strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1477: cat stderr +./calc.at:1478: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -syntax error, unexpected '+' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -179460,25 +181598,11 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: cat stderr stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; +input: +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179488,8 +181612,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc $LIBS -stderr: Starting parse Entering state 0 Reading a token @@ -179601,8 +181723,10 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -syntax error, unexpected '+' -./calc.at:1480: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !- ++ +./calc.at:1480: $PREPARSER ./calc input +input: +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179612,9 +181736,44 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1480: cat stderr + | 1//2 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1478: cat stderr +stderr: +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr +stderr: +566. calc.at:1491: testing Calculator glr2.cc %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1491: mv calc.y.tmp calc.y + +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +stderr: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -./calc.at:1486: "$PERL" -pi -e 'use strict; +stderr: +input: + | (1 + 1) / (1 - 1) +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179624,144 +181783,36 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | error -./calc.at:1476: cat stderr -./calc.at:1482: cat stderr -./calc.at:1477: $PREPARSER ./calc input -input: | (1 + # + 1) = 1111 -./calc.at:1480: $PREPARSER ./calc input -stderr: -1.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1486: cat stderr - | (* *) + (*) + (*) -stderr: -input: -./calc.at:1482: $PREPARSER ./calc input -stderr: -stderr: -1.1: syntax error Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ 
= nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - | 1 + 2 * 3 + !+ ++ -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1482: cat stderr +stderr: +error: null divisor +stderr: +./calc.at:1485: cat stderr +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179771,7 +181822,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: "$PERL" -pi -e 'use strict; +1.6: syntax error: invalid character: '#' +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (# + 1) = 1111 +./calc.at:1480: cat stderr +input: +./calc.at:1482: $PREPARSER ./calc input +error: null divisor +stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -179781,10 +181842,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error: invalid character: '#' stderr: -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (- *) + (1 2) = 1 +1.6: syntax error: invalid character: '#' +./calc.at:1485: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -179792,42 +181853,18 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token error () +Error: discarding token error () Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Error: discarding token '+' () Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () Entering state 11 @@ -179839,54 +181876,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 
(line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 Reading a token -Next token is token '*' () -syntax error -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) -> $$ = nterm exp (1111) -Entering state 29 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -179900,80 +181915,10 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -input: -./calc.at:1485: cat stderr - | 1 = 2 = 3 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1479: cat stderr -stderr: -syntax error: invalid character: '#' +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -input: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: cat stderr input: -./calc.at:1479: $PREPARSER ./calc /dev/null - | 1 + 2 * 3 + !- ++ -./calc.at:1476: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1485: $PREPARSER ./calc input -stderr: -stderr: -syntax error, unexpected end of input -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: Starting parse Entering state 0 Reading a token @@ -180091,43 +182036,86 @@ Cleanup: popping token end of file () Cleanup: popping nterm input () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1480: $PREPARSER ./calc input +stderr: +stderr: +stderr: +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by 
rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1477: cat stderr -stderr: -stderr: -./calc.at:1482: cat stderr -stderr: -stderr: -input: -./calc.at:1480: "$PERL" -pi -e 'use strict; +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + | error +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -180137,8 +182125,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected end of input -stdout: +./calc.at:1486: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -180255,20 +182242,7 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () - | 1 = 2 = 3 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1486: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -180278,106 +182252,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1480: cat stderr stderr: -input: - | 1 + 2 * 3 + !+ ++ -1.7: syntax error -input: -./calc.at:1486: cat stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: $PREPARSER ./calc input +syntax error: invalid character: '#' +syntax error: invalid character: '#' stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1476: "$PERL" -pi -e 'use strict; +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -180387,13 +182274,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1480: $PREPARSER ./calc input -input: -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1478: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -180404,134 +182286,15 @@ }eg ' expout || exit 77 stderr: -stderr: - | - | +1 -./calc.at:1486: $PREPARSER ./calc input -stderr: -./calc.at:1479: cat stderr -1.7: syntax error Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" 
(3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -error: null divisor -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () ./calc.at:1485: cat stderr -stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -stderr: - | 1 + 2 * 3 + !- ++ -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -stderr: -error: null divisor -input: - | 1 2 -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; +552. 
calc.at:1479: ok +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -180541,91 +182304,20 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) input: -stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1476: cat stderr + | (1 + 1) / (1 - 1) +./calc.at:1478: $PREPARSER ./calc input +./calc.at:1480: cat stderr input: -./calc.at:1479: $PREPARSER ./calc input | (* *) + (*) + (*) ./calc.at:1485: $PREPARSER ./calc input -syntax error, unexpected number -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +./calc.at:1482: cat stderr stderr: -input: + +1.11-17: error: null divisor +./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -180636,6 +182328,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +input: Starting parse Entering state 0 Reading a token @@ -180751,83 +182444,16 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -stderr: ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 stderr: -syntax error, unexpected number -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1486: cat stderr +1.11-17: error: null divisor stderr: - | (#) + (#) = 2222 stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) +input: +syntax error: invalid character: '#' Starting parse Entering state 0 Reading a token @@ -180943,108 +182569,12 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1486: cat stderr -./calc.at:1477: cat stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1485: cat stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1486: $PREPARSER ./calc /dev/null - | - | +1 -./calc.at:1477: $PREPARSER ./calc input -554. calc.at:1480: ok -./calc.at:1482: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -input: -stderr: -stderr: -2.1: syntax error -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () -./calc.at:1479: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr -input: -stderr: - | (#) + (#) = 2222 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 ./calc.at:1482: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () input: stderr: stderr: -stderr: - | (!!) 
+ (1 2) = 1 -2.1: syntax error -./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -181052,40 +182582,33 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -181096,39 +182619,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -181142,77 +182658,11 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +syntax error: invalid character: '#' ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 -./calc.at:1480: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a 
token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1476: "$PERL" -pi -e 'use strict; + | 1 = 2 = 3 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1478: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -181222,8 +182672,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -181233,12 +182682,38 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 Starting parse Entering state 0 Reading a token @@ -181246,40 +182721,33 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -181290,39 +182758,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -181336,9 +182797,65 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1485: cat stderr +./calc.at:1480: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1478: cat stderr stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +input: +551. calc.at:1478: ok +./calc.at:1480: cat stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: Starting parse Entering state 0 Reading a token @@ -181404,7 +182921,9 @@ $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1476: cat stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1480: $PREPARSER ./calc input ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -181416,46 +182935,8 @@ }eg ' expout || exit 77 stderr: + stderr: -./calc.at:1486: cat stderr -syntax error, unexpected number -error: 2222 != 1 -input: -./calc.at:1477: cat stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1485: $PREPARSER ./calc input -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1477: $PREPARSER ./calc /dev/null -input: -stderr: -1.1: syntax error -input: -stderr: - | (1 + #) = 1111 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1476: $PREPARSER ./calc input - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -181513,269 +182994,102 @@ Shifting token '!' 
() Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): $1 = token '!' () - $2 = token '-' () + $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +567. calc.at:1492: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +input: +./calc.at:1486: cat stderr +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1492: mv calc.y.tmp calc.y + +./calc.at:1482: cat stderr +./calc.at:1485: $PREPARSER ./calc input stderr: stderr: -1.1: syntax error -syntax error: invalid character: '#' -./calc.at:1479: cat stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token Next token is token '*' () Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +syntax error: invalid character: '#' +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +input: +./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y + | (1 + 1) / (1 - 1) + | + | +1 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1486: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -181842,38 +183156,6 @@ $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (- *) + (1 2) = 1 -565. calc.at:1491: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1491: mv calc.y.tmp calc.y - -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: $PREPARSER ./calc input -syntax error: invalid character: '#' -stderr: -./calc.at:1482: cat stderr -./calc.at:1480: cat stderr Starting parse Entering state 0 Reading a token @@ -181881,28 +183163,6 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 @@ -181923,102 +183183,26 @@ -> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '+' () +Next token is token ')' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token Next token is token '(' () Shifting token '(' () @@ -182032,82 +183216,48 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 30 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (0) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -182122,42 +183272,28 @@ Cleanup: popping token "end of input" () Cleanup: popping nterm input () stderr: -./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + #) = 1111 -./calc.at:1482: $PREPARSER ./calc input -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1477: cat stderr -./calc.at:1485: cat stderr -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-syntax error, unexpected number -error: 2222 != 1 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () stderr: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -182177,51 +183313,89 @@ Shifting token '+' () Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2) $2 = 
token '\n' () -> $$ = nterm line () Entering state 7 @@ -182235,37 +183409,7 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () - | error -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: $PREPARSER ./calc input -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -syntax error, unexpected invalid token -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1476: cat stderr -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: cat stderr - | (#) + (#) = 2222 -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -182276,73 +183420,18 @@ }eg ' expout || exit 77 stderr: -./calc.at:1485: $PREPARSER ./calc input -stderr: +./calc.at:1480: cat stderr +568. calc.at:1492: testing Calculator glr2.cc %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1492: mv calc.y.tmp calc.y + Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 Next token is token '\n' () Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () -> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -182350,20 +183439,52 @@ -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr +./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y input: + | (1 + # + 1) = 1111 +./calc.at:1480: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 input: +stderr: + | (#) + (#) = 2222 +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -182462,16 +183583,10 @@ Cleanup: popping token end of file () Cleanup: popping nterm input () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected invalid token -./calc.at:1479: cat stderr - | (!!) + (1 2) = 1 - | (# + 1) = 1111 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1486: $PREPARSER ./calc input -input: - | (* *) + (*) + (*) -./calc.at:1479: $PREPARSER ./calc input +./calc.at:1486: cat stderr stderr: +syntax error: invalid character: '#' +./calc.at:1482: cat stderr stderr: Starting parse Entering state 0 @@ -182570,265 +183685,23 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' +./calc.at:1486: $PREPARSER ./calc /dev/null stderr: +556. calc.at:1482: ok Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1477: cat stderr -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (!!) + (1 2) = 1 -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: +syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -182839,7 +183712,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -182849,15 +183722,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -stderr: +./calc.at:1492: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS + ./calc.at:1480: cat stderr ./calc.at:1485: cat stderr -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -182868,42 +183736,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr -./calc.at:1486: cat stderr input: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -stderr: - | 1 = 2 = 3 + | (1 + 1) / (1 - 1) input: ./calc.at:1480: $PREPARSER ./calc input | (1 + #) = 1111 +./calc.at:1486: cat stderr ./calc.at:1485: $PREPARSER ./calc input -1.11: syntax error -1.1-16: error: 2222 != 1 -input: stderr: +error: null divisor stderr: - | (- *) + (1 2) = 1 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: Starting parse Entering state 0 @@ -182983,14 +183826,12 @@ Cleanup: popping token end of file () Cleanup: popping nterm input () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1486: $PREPARSER ./calc input - | (# + 1) = 1111 -syntax error, unexpected '=' -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: cat stderr stderr: stderr: +error: null divisor +stderr: Starting parse Entering state 0 Reading a token @@ -182998,11 +183839,33 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -183010,90 +183873,88 @@ Shifting token '+' () Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) - $2 = token '=' () + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -stderr: -syntax error, unexpected '=' -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -183104,9 +183965,16 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token @@ -183122,13 +183990,37 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 30 +Reading a token +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -183144,10 +184036,10 @@ Reading a token Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) + $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () @@ -183163,17 +184055,17 @@ Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -183195,25 +184087,33 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -183224,16 +184124,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -183242,24 +184142,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1476: cat stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1492: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS stderr: Starting parse Entering state 0 @@ -183268,23 +184166,9 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -183312,15 +184196,53 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -183332,16 +184254,60 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -183349,67 +184315,39 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 27 +Entering state 12 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - | 1 + 2 * 3 + !+ ++ -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 30 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -183421,32 +184359,40 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -183460,8 +184406,9 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -input: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +569. calc.at:1494: testing Calculator C++ %glr-parser %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... +./calc.at:1494: mv calc.y.tmp calc.y + ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -183472,9 +184419,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (1 + # + 1) = 1111 -./calc.at:1476: $PREPARSER ./calc input -./calc.at:1477: cat stderr ./calc.at:1480: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -183485,29 +184429,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: +./calc.at:1480: cat stderr +./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y ./calc.at:1485: cat stderr -input: -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1479: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -syntax error: invalid character: '#' -./calc.at:1486: cat stderr -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -183517,25 +184442,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -./calc.at:1480: cat stderr +555. calc.at:1480: ok input: -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' | (# + 1) = 1111 ./calc.at:1485: $PREPARSER ./calc input -input: -stderr: -./calc.at:1482: cat stderr stderr: - | (* *) + (*) + (*) -./calc.at:1486: $PREPARSER ./calc input +./calc.at:1486: cat stderr Starting parse Entering state 0 Reading a token @@ -183606,10 +184518,7 @@ Cleanup: popping token end of file () Cleanup: popping nterm input () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 + stderr: Starting parse Entering state 0 @@ -183681,6 +184590,8 @@ Cleanup: popping token end of file () Cleanup: popping nterm input () input: + | (!!) + (1 2) = 1 +./calc.at:1486: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -183689,19 +184600,20 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -183719,12 +184631,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 @@ -183738,52 +184659,38 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -183798,14 +184705,7 @@ Cleanup: popping token "end of input" () Cleanup: popping nterm input () ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 -./calc.at:1480: $PREPARSER ./calc input -input: - | (1 + # + 1) = 1111 -stderr: -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1477: "$PERL" -pi -e 'use strict; +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -183815,8 +184715,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected '+' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -183825,46 +184723,20 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -183872,15 +184744,8 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 @@ -183889,80 +184754,6 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '+' -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 @@ -183971,24 +184762,13 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () +Next token is token "number" (2) +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 @@ -184000,76 +184780,18 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) - $2 = token '=' () + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -./calc.at:1485: cat stderr -stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -184077,63 +184799,21 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -184147,16 +184827,9 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1479: cat stderr - | (* *) + (*) + (*) -./calc.at:1476: cat stderr -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1486: cat stderr +./calc.at:1485: cat stderr input: - | (1 + # + 1) = 1111 -stderr: -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -184166,14 +184839,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -1.2: syntax error -1.10: syntax error -1.16: syntax error -input: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: + | (1 + # + 1) = 1111 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1494: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS stderr: - | (#) + (#) = 2222 Starting parse Entering state 0 Reading a token @@ -184257,101 +184926,10 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1479: $PREPARSER ./calc input -input: +./calc.at:1486: cat stderr ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1486: $PREPARSER ./calc input -stderr: -./calc.at:1480: cat stderr - | (1 + 1) / (1 - 1) 
-./calc.at:1476: $PREPARSER ./calc input -1.2: syntax error -1.10: syntax error -1.16: syntax error -stderr: -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: Starting parse Entering state 0 Reading a token @@ -184435,272 +185013,51 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1480: $PREPARSER ./calc /dev/null -error: null divisor -stderr: -./calc.at:1476: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error, unexpected end of input -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' -input: -error: null divisor - | 1 + 2 * 3 + !- ++ + | (- *) + (1 2) = 1 ./calc.at:1486: $PREPARSER ./calc input stderr: -./calc.at:1482: cat stderr -stderr: -syntax error, unexpected end of input +570. calc.at:1494: testing Calculator glr2.cc %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
+./calc.at:1494: mv calc.y.tmp calc.y + Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1477: cat stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + 1) / (1 - 1) -stderr: -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1485: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 +Entering state 2 Reading a token Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -input: -./calc.at:1476: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 @@ -184713,54 +185070,36 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token 
"number" (1) Shifting token "number" (1) @@ -184768,38 +185107,21 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -184813,7 +185135,9 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -184823,64 +185147,45 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1479: cat stderr -input: -stderr: -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -184894,9 +185199,36 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -184904,38 +185236,21 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -184949,27 +185264,21 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1478: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - +./calc.at:1485: cat stderr +input: +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | (1 + 1) / (1 - 1) ./calc.at:1485: $PREPARSER ./calc input -input: -./calc.at:1476: cat stderr -input: -stderr: -input: stderr: - | (1 + #) = 1111 -./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -185085,61 +185394,7 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () - | 1 + 2 * 3 + !- ++ ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: $PREPARSER ./calc input -stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1480: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -stderr: -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: -stderr: -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1486: cat stderr stderr: Starting parse @@ -185257,26 +185512,10 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -syntax error: invalid character: '#' -547. calc.at:1476: ok -./calc.at:1482: cat stderr -stderr: -stderr: -stderr: -input: -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -error: 4444 != 1 input: - | 1 2 -./calc.at:1478: $PREPARSER ./calc input -556. 
calc.at:1482: ok - | (#) + (#) = 2222 + | (* *) + (*) + (*) ./calc.at:1486: $PREPARSER ./calc input stderr: -stderr: Starting parse Entering state 0 Reading a token @@ -185284,12 +185523,15 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -185311,12 +185553,12 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -185330,37 +185572,52 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -185374,20 +185631,6 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () - -1.3: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -185398,18 +185641,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.3: syntax error +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: cat stderr stderr: Starting parse Entering state 0 @@ -185418,12 +185651,15 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -185445,12 +185681,12 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -185464,37 +185700,52 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -185508,39 +185759,8 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: cat stderr -./calc.at:1485: cat stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr - -./calc.at:1480: cat stderr -./calc.at:1478: cat stderr -input: 558. calc.at:1485: ok - | (# + 1) = 1111 -input: -./calc.at:1479: $PREPARSER ./calc input - | (#) + (#) = 2222 -./calc.at:1477: $PREPARSER ./calc input +./calc.at:1494: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -185551,258 +185771,282 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -input: -input: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + ./calc.at:1486: cat stderr -stderr: - | (!!) + (1 2) = 1 -./calc.at:1480: $PREPARSER ./calc input - | 1//2 -./calc.at:1478: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -566. calc.at:1491: testing Calculator glr2.cc %locations %header parse.error=verbose %debug %name-prefix "calc" %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -1.3: syntax error input: - -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: mv calc.y.tmp calc.y - -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -stderr: - | (1 + #) = 1111 -stderr: -stderr: + | 1 + 2 * 3 + !+ ++ ./calc.at:1486: $PREPARSER ./calc input -1.3: syntax error -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -./calc.at:1491: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: cat stderr -./calc.at:1478: cat stderr +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) input: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | 1 + 2 * 3 + !- ++ +./calc.at:1486: $PREPARSER ./calc input stderr: - | (1 + # + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +571. calc.at:1504: testing Calculator lalr1.d ... 
+./calc.at:1504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -stderr: -./calc.at:1480: cat stderr -syntax error: invalid character: '#' -567. calc.at:1492: testing Calculator C++ %glr-parser %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: mv calc.y.tmp calc.y - -./calc.at:1477: cat stderr -input: - | error -./calc.at:1478: $PREPARSER ./calc input +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -185813,63 +186057,53 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (- *) + (1 2) = 1 -stderr: -./calc.at:1480: $PREPARSER ./calc input -input: -stderr: -./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -1.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -568. 
calc.at:1492: testing Calculator glr2.cc %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... - | (1 + #) = 1111 -./calc.at:1477: $PREPARSER ./calc input -stderr: ./calc.at:1486: cat stderr -syntax error: invalid character: '#' stderr: -./calc.at:1492: mv calc.y.tmp calc.y - -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -1.6: syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -1.1: syntax error +stdout: stderr: +./calc.at:1479: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +stdout: input: - | (# + 1) = 1111 -stderr: -./calc.at:1492: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 +./calc.at:1489: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + + | (#) + (#) = 2222 ./calc.at:1486: $PREPARSER ./calc input -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' +input: +input: stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1489: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -185884,12 +186118,6 @@ Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -185902,49 +186130,9 @@ -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: cat stderr -./calc.at:1491: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -185957,12 +186145,6 @@ Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -185973,32 +186155,39 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (2222) +-> $$ = nterm exp 
(2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -186012,93 +186201,23 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1479: $PREPARSER ./calc input -stderr: -error: null divisor -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr -input: -./calc.at:1477: cat stderr - | 1 = 2 = 3 -./calc.at:1478: $PREPARSER ./calc input -stderr: -error: null divisor -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1486: cat stderr -input: - | (# + 1) = 1111 -./calc.at:1477: $PREPARSER ./calc input -1.7: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (* *) + (*) + (*) -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + # + 1) = 1111 -./calc.at:1486: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1479: cat stderr -1.7: syntax error -stderr: -stderr: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-./calc.at:1492: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -1.2: syntax error: invalid character: '#' stderr: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -186106,33 +186225,13 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token syntax error: invalid character: '#' Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -186145,102 +186244,21 @@ -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -552. calc.at:1479: ok -./calc.at:1492: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -stdout: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token syntax error: invalid character: '#' Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -186251,32 +186269,39 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -186290,57 +186315,6 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1478: cat stderr -./calc.at:1482: $PREPARSER ./calc input - -./calc.at:1480: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1477: cat stderr -stderr: Starting parse Entering state 0 Reading a token @@ -187177,32 +187151,11 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1477: $PREPARSER ./calc input -input: - | - | +1 -input: -stderr: - | 1 + 2 * 3 + !+ ++ -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +571. calc.at:1504: ./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1504) stderr: -1.6: syntax error: invalid character: '#' -2.1: syntax error - | (1 + 1) / (1 - 1) -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -188040,27 +187993,47 @@ Cleanup: popping token "end of input" () Cleanup: popping nterm input () stderr: +input: + | 1 2 +./calc.at:1489: $PREPARSER ./calc input +input: + +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 2 stderr: -stderr: -2.1: syntax error +./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token "number" (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +syntax error, unexpected number +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1486: cat stderr +Starting parse +Entering state 0 Reading a token Next token is token "number" (1) Shifting token "number" (1) @@ -188068,28 +188041,20 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token "number" (2) +syntax error, 
unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token "number" (2) +stderr: +syntax error, unexpected number +input: + | (1 + #) = 1111 +./calc.at:1486: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () @@ -188103,48 +188068,55 @@ -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -188158,10 +188130,9 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -input: ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1477: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -188171,14 +188142,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 + 2 * 3 + !- ++ -./calc.at:1480: $PREPARSER ./calc input - | 1 2 -./calc.at:1482: $PREPARSER ./calc input -stderr: -stderr: -stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -188198,89 +188161,51 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering 
state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -188294,7 +188219,7 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -188304,49 +188229,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr -stderr: -569. calc.at:1494: testing Calculator C++ %glr-parser %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1494: mv calc.y.tmp calc.y - -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -input: -./calc.at:1478: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1477: $PREPARSER ./calc input -./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -stderr: +./calc.at:1489: cat stderr +./calc.at:1479: cat stderr ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -188357,104 +188241,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: $PREPARSER ./calc /dev/null -1.11-17: error: null divisor -./calc.at:1477: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.1: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: cat stderr -./calc.at:1486: cat stderr -stderr: -1.11-17: error: null divisor -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -1.1: syntax error -560. calc.at:1486: ok -./calc.at:1477: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: - | (#) + (#) = 2222 -stdout: -./calc.at:1479: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -./calc.at:1480: $PREPARSER ./calc input -./calc.at:1482: cat stderr -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1477: cat stderr -stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' input: input: - | 1//2 -./calc.at:1482: $PREPARSER ./calc input - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1486: cat stderr +572. calc.at:1509: testing Calculator D ... + | 1//2 ./calc.at:1479: $PREPARSER ./calc input -549. calc.at:1477: ok +./calc.at:1509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y stderr: Starting parse Entering state 0 @@ -188472,18 +188267,18 @@ Entering state 22 Reading a token Next token is token '/' () -syntax error +syntax error, unexpected '/', expecting number or '-' or '(' or '!' Error: popping token '/' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '/' () -./calc.at:1478: cat stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (# + 1) = 1111 +./calc.at:1486: $PREPARSER ./calc input stderr: -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1494: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -188500,148 +188295,87 @@ Entering state 22 Reading a token Next token is token '/' () -syntax error +syntax error, unexpected '/', expecting number or '-' or '(' or '!' Error: popping token '/' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '/' () -./calc.at:1478: $PREPARSER ./calc input -stderr: -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: - -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -input: -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -stderr: -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1480: cat stderr -stderr: -1.2: syntax error -1.18: syntax error -1.23: syntax error -1.41: syntax error -1.1-46: error: 4444 != 1 -syntax error, unexpected number -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -570. calc.at:1494: testing Calculator glr2.cc %no-lines %locations %header parse.error=verbose %debug api.prefix={calc} %verbose %parse-param {semantic_value *result}{int *count}{int *nerrs} ... 
-./calc.at:1494: mv calc.y.tmp calc.y - -stderr: -input: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (1 + #) = 1111 -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: $PREPARSER ./calc input -syntax error, unexpected number -stderr: -syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' -./calc.at:1494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.cc calc.y -./calc.at:1478: cat stderr -./calc.at:1482: cat stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (!!) + (1 2) = 1 -./calc.at:1480: cat stderr - | error -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1482: $PREPARSER ./calc input -571. calc.at:1504: testing Calculator lalr1.d ... 
-./calc.at:1504: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y stderr: -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () -./calc.at:1479: cat stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (# + 1) = 1111 -./calc.at:1480: $PREPARSER ./calc input -stderr: -stderr: -stderr: -1.11: syntax error -1.1-16: error: 2222 != 1 -input: +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token syntax error: invalid character: '#' -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token "invalid token" () -syntax error -Cleanup: discarding lookahead token "invalid token" () - | 1//2 -./calc.at:1479: $PREPARSER ./calc input -stderr: +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -syntax error: invalid character: '#' stderr: syntax error, unexpected '/', expecting number or '-' or '(' or '!' stdout: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -ne ' +stderr: +./calc.at:1482: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -188652,19 +188386,76 @@ || /\t/ )' calc.cc -stderr: -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -./calc.at:1482: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -188675,7 +188466,8 @@ }eg ' expout || exit 77 input: -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1479: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -188698,30 +188490,17 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1494: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o calc calc.cc calc-lex.cc calc-main.cc $LIBS -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: cat stderr -./calc.at:1478: cat stderr -./calc.at:1480: cat stderr +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1486: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -188729,11 +188508,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -188741,23 +188520,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -188767,16 +188546,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -188785,21 +188564,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -188807,11 +188586,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ 
-188823,29 +188602,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -188859,22 +188638,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -188883,12 +188662,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -188897,11 +188676,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -188911,11 +188690,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -188923,23 +188702,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 
11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -188952,22 +188731,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -188976,12 +188755,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -188995,16 +188774,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -189012,7 +188791,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -189023,16 +188802,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -189042,16 +188821,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -189060,13 +188839,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) - $2 = to./calc.at:1479: cat stderr -ken '\n' () + $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 
83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -189075,11 +188853,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -189097,28 +188875,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -189131,22 +188909,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -189155,12 +188933,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -189169,21 +188947,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -189191,16 +188969,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by 
rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -189210,16 +188988,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -189233,22 +189011,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -189257,22 +189035,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -189284,11 +189062,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -189296,16 +189074,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -189314,7 +189092,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 
= token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -189322,7 +189100,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -189332,16 +189110,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -189350,12 +189128,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -189364,21 +189142,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -189386,11 +189164,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -189398,23 +189176,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -189424,16 +189202,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) -> $$ = nterm exp (256) Entering state 27 
Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -189442,12 +189220,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -189457,11 +189235,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -189469,16 +189247,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -189487,7 +189265,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -189498,16 +189276,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -189517,16 +189295,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -189535,43 +189313,134 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - | (- *) + (1 2) = 1 -./calc.at:1478: $PREPARSER ./calc input | (1 + # + 1) = 1111 -./calc.at:1480: $PREPARSER ./calc input +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1489: cat stderr input: + | error +./calc.at:1479: $PREPARSER ./calc input stderr: stderr: -input: - | 1 = 2 = 3 -./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -189579,11 +189448,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -189591,23 +189460,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -189617,16 +189486,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (7) +Shifting token "number" (7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -189635,21 +189504,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -189657,11 +189526,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -189673,29 +189542,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) 
Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -189709,22 +189578,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (5) -Shifting token number (5) +Next token is token "number" (5) +Shifting token "number" (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -189733,12 +189602,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -189747,11 +189616,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -189761,11 +189630,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -189773,23 +189642,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -189802,22 +189671,22 @@ Shifting token '-' () Entering state 2 Reading a token 
-Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -189826,12 +189695,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -189845,16 +189714,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -189862,7 +189731,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -189873,16 +189742,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -189892,16 +189761,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -189910,12 +189779,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -189924,11 +189793,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing 
stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -189946,28 +189815,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -189980,22 +189849,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -190004,12 +189873,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -190018,21 +189887,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -190040,16 +189909,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by 
rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -190059,16 +189928,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -190082,22 +189951,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (4) +Shifting token "number" (4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -190106,22 +189975,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -190133,11 +190002,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -190145,16 +190014,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -190163,7 +190032,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -190171,7 +190040,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token 
'-' () $3 = nterm exp (-1) @@ -190181,16 +190050,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -190199,12 +190068,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -190213,21 +190082,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -190235,11 +190104,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -190247,23 +190116,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -190273,16 +190142,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (256) +Shifting token "number" (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -190291,12 +190160,12 @@ Next token is token '\n' () Shifting 
token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -190306,11 +190175,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -190318,16 +190187,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -190336,7 +190205,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -190347,16 +190216,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -190366,16 +190235,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (64) +Shifting token "number" (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -190384,73 +190253,119 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -stderr: - | error -./calc.at:1479: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '=' () -syntax error -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () stderr: +input: +input: +572. calc.at:1509: | 1 2 syntax error, unexpected invalid token +./calc.at:1482: $PREPARSER ./calc input ./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' + skipped (calc.at:1509) + | error stderr: -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 +./calc.at:1489: $PREPARSER ./calc input stderr: -./calc.at:1485: $PREPARSER ./calc input -571. 
calc.at:1504: 1.4: syntax error -1.12: syntax error -1.1-17: error: 2222 != 1 +syntax error, unexpected invalid token Starting parse Entering state 0 Reading a token @@ -190462,30 +190377,20 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () syntax error -Error: popping nterm exp (2) -Error: popping token '=' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () - skipped (calc.at:1504) +Cleanup: discarding lookahead token "number" (2) +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () syntax error, unexpected invalid token -stderr: -syntax error: invalid character: '#' -./calc.at:1478: "$PERL" -pi -e 'use strict; +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -190500,18 +190405,25 @@ Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token "number" (2) +syntax error Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) +Cleanup: discarding lookahead token "number" (2) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" () ./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -190522,8 +190434,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1480: "$PERL" -pi -e 'use strict; +./calc.at:1486: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -190533,11 +190445,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1478: cat stderr ./calc.at:1479: cat stderr -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; +input: + | (1 + 1) / (1 - 1) +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -190547,98 +190458,114 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1486: $PREPARSER ./calc input +stderr: +input: +./calc.at:1489: cat stderr +./calc.at:1482: cat stderr Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token 
number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -input: -input: -input: - | (* *) + (*) + (*) - | (1 + 1) / (1 - 1) -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1480: $PREPARSER ./calc input - | 1 = 2 = 3 -stderr: -./calc.at:1479: $PREPARSER ./calc input -stderr: -1.2: syntax error -1.10: syntax error -1.16: syntax error -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: null divisor -./calc.at:1480: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1482: cat stderr -syntax error, unexpected '=' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -error: null divisor -syntax error, unexpected '=' -1.2: syntax error -1.10: syntax error -1.16: syntax error -input: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | - | +1 -./calc.at:1480: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: cat stderr -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -572. calc.at:1509: testing Calculator D ... 
-./calc.at:1509: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -input: - | 1//2 -Starting parse -Entering state 0 +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () -> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -190646,56 +190573,122 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1480: cat stderr -./calc.at:1478: cat stderr -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: $PREPARSER ./calc input +Now at end of input. 
+Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 = 2 = 3 +./calc.at:1479: $PREPARSER ./calc input stderr: +stderr: +syntax error, unexpected '=' +input: +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 Reading a token Next token is token '/' () Shifting token '/' () Entering state 22 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -555. 
calc.at:1480: ok -stderr: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () -> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -190703,20 +190696,26 @@ -> $$ = nterm input () Entering state 6 Reading a token -Next token is token '+' () -syntax error -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1479: cat stderr +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +input: + | 1//2 +./calc.at:1482: $PREPARSER ./calc input + | 1 = 2 = 3 +./calc.at:1489: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -190725,21 +190724,44 @@ Entering state 22 Reading a token Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+syntax error Error: popping token '/' () Error: popping nterm exp (1) Cleanup: discarding lookahead token '/' () -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1478: $PREPARSER ./calc input -input: - | - | +1 -stderr: -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; +syntax error, unexpected '=' +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -190749,16 +190771,62 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected '+' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -572. calc.at:1509: stderr: - stderr: -syntax error, unexpected '+' - skipped (calc.at:1509) stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () stdout: -./calc.at:1489: "$PERL" -ne ' +./calc.at:1486: cat stderr +573. calc.at:1510: testing Calculator D %locations ... +./calc.at:1485: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -190769,7 +190837,30 @@ || /\t/ )' calc.cc -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +560. 
calc.at:1486: ok +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -190780,11 +190871,6 @@ }eg ' expout || exit 77 input: - | 1 + 2 * 3 + !- ++ -./calc.at:1482: cat stderr -./calc.at:1478: $PREPARSER ./calc input -input: -./calc.at:1485: cat stderr | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -190798,31 +190884,29 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1489: $PREPARSER ./calc input -stderr: -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1489: cat stderr +input: -./calc.at:1482: $PREPARSER ./calc /dev/null -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1482: cat stderr +input: stderr: -./calc.at:1479: cat stderr +stderr: + | + | +1 +./calc.at:1489: $PREPARSER ./calc input +syntax error, unexpected '+' Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -190830,11 +190914,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -190842,23 +190926,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -190868,16 +190952,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) 
-Shifting token "number" (7) +Next token is token number (7) +Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -190886,21 +190970,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -190908,11 +190992,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -190924,29 +191008,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -190960,22 +191044,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (5) +Shifting token number (5) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -190984,12 +191068,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 
17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -190998,11 +191082,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -191012,11 +191096,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -191024,23 +191108,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -191053,22 +191137,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -191077,12 +191161,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -191096,16 +191180,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -191113,7 +191197,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 
(line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -191124,16 +191208,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -191143,16 +191227,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -191160,14 +191244,13 @@ Entering state 8 Next token is token '\n' () Shifting token '\n' () -Entering state 2stderr: -4 -Reducing stack 0 by rule 4 (line 75): +Entering state 24 +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -191176,11 +191259,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -191198,28 +191281,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -191232,22 +191315,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = 
nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -191256,12 +191339,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -191270,21 +191353,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -191292,16 +191375,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -191311,16 +191394,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -191334,22 +191417,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is token number (4) +Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -191358,22 +191441,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by 
rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -191385,11 +191468,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -191397,16 +191480,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -191415,7 +191498,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -191423,7 +191506,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -191433,16 +191516,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -191451,12 +191534,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -191465,21 +191548,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 Reading a token 
@@ -191487,11 +191570,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -191499,23 +191582,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -191525,16 +191608,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (256) +Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -191543,12 +191626,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -191558,11 +191641,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -191570,16 +191653,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -191588,7 +191671,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -191599,16 +191682,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) 
+Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -191618,16 +191701,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (64) +Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -191636,46 +191719,57 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | error stderr: -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Now at end of input. 
-syntax error -Cleanup: discarding lookahead token "end of input" () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | error -./calc.at:1485: $PREPARSER ./calc input -stderr: -./calc.at:1479: $PREPARSER ./calc /dev/null -stderr: -stderr: +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -191683,11 +191777,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -191695,23 +191789,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -191721,16 +191815,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (7) -Shifting token "number" (7) +Next token is token number (7) +Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -191739,21 +191833,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" 
(1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -191761,11 +191855,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -191777,29 +191871,29 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -191813,22 +191907,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (5) -Shifting token "number" (5) +Next token is token number (5) +Shifting token number (5) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -191837,12 +191931,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -191851,11 +191945,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -191865,11 +191959,11 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token @@ -191877,23 +191971,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token 
"number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -191906,22 +192000,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -191930,12 +192024,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -191949,16 +192043,16 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -191966,7 +192060,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -191977,16 +192071,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -191996,16 +192090,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) 
Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -192014,12 +192108,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -192028,11 +192122,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -192050,28 +192144,28 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -192084,22 +192178,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -192108,12 +192202,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -192122,21 +192216,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token 
number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -192144,16 +192238,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -192163,16 +192257,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -192186,22 +192280,22 @@ Shifting token '-' () Entering state 2 Reading a token -Next token is token "number" (4) -Shifting token "number" (4) +Next token is token number (4) +Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -192210,22 +192304,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -192237,11 +192331,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -192249,16 +192343,16 @@ Shifting token '-' () Entering state 19 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - 
$1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -192267,7 +192361,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -192275,7 +192369,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -192285,16 +192379,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -192303,12 +192397,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -192317,21 +192411,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 Reading a token @@ -192339,11 +192433,11 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token @@ -192351,23 +192445,23 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 
by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -192377,16 +192471,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (256) -Shifting token "number" (256) +Next token is token number (256) +Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (256) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -192395,12 +192489,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -192410,11 +192504,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 Reading a token @@ -192422,16 +192516,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -192440,7 +192534,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -192451,16 +192545,16 @@ Shifting token '^' () Entering state 23 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -192470,16 +192564,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (64) -Shifting token "number" (64) +Next token is token number (64) +Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -192488,42 +192582,74 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): 
+Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () stderr: +syntax error, unexpected '+' +input: + | 1 2 +stderr: +./calc.at:1485: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token -Now at end of input. +Next token is token "invalid token" () syntax error -Cleanup: discarding lookahead token "end of input" () +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +573. calc.at:1510: Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -573. calc.at:1510: testing Calculator D %locations ... 
-./calc.at:1510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -syntax error, unexpected end of input -input: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: "$PERL" -pi -e 'use strict; + skipped (calc.at:1510) +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -192533,38 +192659,50 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | 1 2 -./calc.at:1489: $PREPARSER ./calc input -stderr: -syntax error, unexpected end of input stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token "number" (2) +Next token is token number (2) syntax error, unexpected number Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: discarding lookahead token number (2) +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" () +syntax error +Cleanup: discarding lookahead token "invalid token" () +./calc.at:1479: cat stderr + +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: $PREPARSER ./calc /dev/null 574. calc.at:1512: testing Calculator D parse.error=detailed api.prefix={calc} %verbose ... 
./calc.at:1512: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1478: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1489: cat stderr +syntax error, unexpected end of input +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -192574,23 +192712,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token "number" (2) -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -192600,8 +192722,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1489: $PREPARSER ./calc /dev/null +./calc.at:1485: cat stderr +syntax error, unexpected end of input +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () input: -./calc.at:1485: "$PERL" -pi -e 'use strict; +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1//2 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1482: cat stderr +stderr: +stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -192611,17 +192749,89 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1478: $PREPARSER ./calc input -./calc.at:1479: cat stderr -573. calc.at:1510: stderr: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -./calc.at:1482: cat stderr -./calc.at:1485: cat stderr - skipped (calc.at:1510) -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: +stderr: + | 1 = 2 = 3 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1479: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -192632,10 +192842,43 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +574. calc.at:1512: input: +575. calc.at:1514: testing Calculator D %debug ... +./calc.at:1489: cat stderr +stderr: + skipped (calc.at:1512) | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1479: $PREPARSER ./calc input -stderr: -input: +./calc.at:1514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () stderr: syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected ')', expecting number or '-' or '(' or '!' @@ -192644,21 +192887,26 @@ error: 4444 != 1 ./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -1.2: syntax error: invalid character: '#' -1.8: syntax error: invalid character: '#' -574. calc.at:1512: ./calc.at:1489: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 stderr: +./calc.at:1489: $PREPARSER ./calc input syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected ')', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected '*', expecting number or '-' or '(' or '!' error: 4444 != 1 -./calc.at:1482: $PREPARSER ./calc input - | 1 = 2 = 3 - skipped (calc.at:1512) -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -192668,9 +192916,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - +./calc.at:1485: cat stderr stderr: -input: Starting parse Entering state 0 Reading a token @@ -192679,7 +192926,7 @@ Entering state 4 Reading a token Next token is token ')' () -syntax error +syntax error, unexpected ')', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token ')' () @@ -192751,7 +192998,7 @@ Entering state 20 Reading a token Next token is token ')' () -syntax error +syntax error, unexpected ')', expecting number or '-' or '(' or '!' Error: popping token '+' () Error: popping nterm exp (3) Shifting token error () @@ -192782,7 +193029,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () @@ -192853,7 +193100,7 @@ Entering state 21 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Error: popping token '*' () Error: popping nterm exp (2) Shifting token error () @@ -192918,39 +193165,7 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1//2 -./calc.at:1489: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -192961,33 +193176,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1478: cat stderr -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | error +./calc.at:1485: $PREPARSER ./calc input stderr: +./calc.at:1482: cat stderr Starting parse Entering state 0 Reading a token @@ -192996,7 +193189,7 @@ Entering state 4 Reading a token Next token is token ')' () -syntax error +syntax error, unexpected ')', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token ')' () @@ -193068,7 +193261,7 @@ Entering state 20 Reading a token Next token is token ')' () -syntax error +syntax error, unexpected ')', expecting number or '-' or '(' or '!' Error: popping token '+' () Error: popping nterm exp (3) Shifting token error () @@ -193099,7 +193292,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () @@ -193170,7 +193363,7 @@ Entering state 21 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Error: popping token '*' () Error: popping nterm exp (2) Shifting token error () @@ -193235,78 +193428,52 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +stderr: ./calc.at:1479: cat stderr Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | + | +1 +./calc.at:1482: $PREPARSER ./calc input +stderr: +575. calc.at:1514: Starting parse +Entering state 0 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () stderr: +input: + skipped (calc.at:1514) Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -input: -input: - | (1 + #) = 1111 -./calc.at:1478: $PREPARSER ./calc input -stderr: +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (!!) + (1 2) = 1 ./calc.at:1479: $PREPARSER ./calc input -1.6: syntax error: invalid character: '#' -stderr: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -193316,10 +193483,31 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: +stderr: syntax error, unexpected number error: 2222 != 1 ./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -193329,21 +193517,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -575. calc.at:1514: testing Calculator D %debug ... -./calc.at:1514: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1485: cat stderr + ./calc.at:1489: cat stderr -1.6: syntax error: invalid character: '#' +stderr: syntax error, unexpected number error: 2222 != 1 -./calc.at:1482: cat stderr +./calc.at:1485: cat stderr +576. calc.at:1516: testing Calculator D parse.error=custom ... input: - | - | +1 -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1516: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y + | (!!) + (1 2) = 1 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -193353,43 +193538,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -576. calc.at:1516: testing Calculator D parse.error=custom ... -./calc.at:1516: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y stderr: input: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () - | error -./calc.at:1489: $PREPARSER ./calc input -stderr: -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) 
+ (1 2) = 1 -./calc.at:1482: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () + | 1 = 2 = 3 +./calc.at:1485: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -193437,7 +193589,7 @@ Entering state 12 Reading a token Next token is token "number" (2) -syntax error +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -193503,7 +193655,36 @@ Cleanup: popping nterm input () ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -193514,35 +193695,9 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1478: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () +./calc.at:1482: cat stderr stderr: stderr: -./calc.at:1479: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" () Starting parse Entering state 0 Reading a token @@ -193590,7 +193745,7 @@ Entering state 12 Reading a token Next token is token "number" (2) -syntax error +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -193654,26 +193809,50 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1479: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' 
+Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1482: $PREPARSER ./calc /dev/null +stderr: input: -575. calc.at:1514: | (# + 1) = 1111 -input: -./calc.at:1478: $PREPARSER ./calc input - skipped (calc.at:1514) +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (- *) + (1 2) = 1 -stderr: ./calc.at:1479: $PREPARSER ./calc input -1.2: syntax error: invalid character: '#' -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -193684,7 +193863,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: "$PERL" -pi -e 'use strict; +576. calc.at:1516: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected number +error: 2222 != 1 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error +Cleanup: discarding lookahead token "end of input" () +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -193694,24 +193883,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -576. calc.at:1516: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected number -error: 2222 != 1 -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1485: cat stderr - -1.2: syntax error: invalid character: '#' skipped (calc.at:1516) -./calc.at:1489: cat stderr -./calc.at:1485: $PREPARSER ./calc /dev/null -stderr: stderr: +./calc.at:1489: cat stderr syntax error, unexpected '*', expecting number or '-' or '(' or '!' syntax error, unexpected number error: 2222 != 1 -./calc.at:1478: "$PERL" -pi -e 'use strict; +577. calc.at:1517: testing Calculator D %locations parse.error=custom ... +./calc.at:1517: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1485: cat stderr + +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -193721,60 +193903,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1482: cat stderr input: - | 1 = 2 = 3 -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +input: + | (- *) + (1 2) = 1 + | + | +1 +./calc.at:1485: $PREPARSER ./calc input ./calc.at:1489: $PREPARSER ./calc input +./calc.at:1482: cat stderr stderr: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of file -Cleanup: discarding lookahead token end of file () - | (- *) + (1 2) = 1 -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1478: cat stderr ./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -193785,38 +193923,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -input: Starting parse Entering state 0 Reading a token @@ -193829,7 +193935,7 @@ Entering state 2 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () Entering state 9 Reducing stack 0 by rule 15 (line 106): @@ -193869,7 +193975,7 @@ Entering state 12 Reading a token Next token is token "number" (2) -syntax error +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -193933,21 +194039,33 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1479: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1478: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input +stderr: stderr: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -193960,7 +194078,7 @@ Entering state 2 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 9 Reducing stack 0 by rule 15 (line 106): @@ -194000,7 +194118,7 @@ Entering state 12 Reading a token Next token is token "number" (2) -syntax error +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -194064,66 +194182,26 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () stderr: -1.6: syntax error: invalid character: '#' -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -577. calc.at:1517: testing Calculator D %locations parse.error=custom ... -./calc.at:1517: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -input: -./calc.at:1485: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -1.6: syntax error: invalid character: '#' - | (* *) + (*) + (*) -./calc.at:1479: $PREPARSER ./calc input -stderr: -./calc.at:1489: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -578. calc.at:1518: testing Calculator D %locations parse.error=detailed ... -input: -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -stderr: -./calc.at:1518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1478: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1485: $PREPARSER ./calc input -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -input: -stderr: - | - | +1 Starting parse Entering state 0 Reading a token @@ -194132,13 +194210,13 @@ Entering state 4 Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+syntax error Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194153,11 +194231,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -194165,16 +194243,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -194184,16 +194262,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2) $2 = token '+' () $3 = nterm exp (1) @@ -194204,7 +194282,7 @@ Entering state 20 Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error Error: popping token '+' () Error: popping nterm exp (3) Shifting token error () @@ -194212,7 +194290,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194220,7 +194298,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -194235,7 +194313,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error Shifting token error () Entering state 11 Next token is token '*' () @@ -194252,7 +194330,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194260,7 +194338,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -194274,11 +194352,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -194286,16 +194364,16 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 30 Reading a token Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1) $2 = token '*' () $3 = nterm exp (2) @@ -194306,7 +194384,7 @@ Entering state 21 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Error: popping token '*' () Error: popping nterm exp (2) Shifting token error () @@ -194319,7 +194397,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194327,7 +194405,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) @@ -194337,16 +194415,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) @@ -194356,68 +194434,52 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1478: cat stderr -./calc.at:1482: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (* *) + (*) + (*) +./calc.at:1479: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () stderr: -Starting parse +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +577. calc.at:1517: Starting parse Entering state 0 Reading a token Next token is token '(' () @@ -194425,13 +194487,13 @@ Entering state 4 Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+syntax error Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194446,11 +194508,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -194458,16 +194520,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -194477,16 +194539,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2) $2 = token '+' () $3 = nterm exp (1) @@ -194497,7 +194559,7 @@ Entering state 20 Reading a token Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' +syntax error Error: popping token '+' () Error: popping nterm exp (3) Shifting token error () @@ -194505,7 +194567,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194513,7 +194575,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -194528,7 +194590,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error Shifting token error () Entering state 11 Next token is token '*' () @@ -194545,7 +194607,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194553,7 +194615,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -194567,11 +194629,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -194579,16 +194641,16 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 30 Reading a token Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1) $2 = token '*' () $3 = nterm exp (2) @@ -194599,7 +194661,7 @@ Entering state 21 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Error: popping token '*' () Error: popping nterm exp (2) Shifting token error () @@ -194612,7 +194674,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -194620,7 +194682,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) @@ -194630,16 +194692,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) @@ -194649,22 +194711,52 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1479: "$PERL" -pi -e 'use strict; + skipped (calc.at:1517) +./calc.at:1485: cat stderr +./calc.at:1489: cat stderr +stderr: +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +./calc.at:1485: $PREPARSER ./calc /dev/null +input: +stderr: + +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () + | (* *) + (*) + (*) +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: $PREPARSER ./calc input +578. calc.at:1518: testing Calculator D %locations parse.error=detailed ... +stderr: +./calc.at:1518: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of file +Cleanup: discarding lookahead token end of file () +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -194674,22 +194766,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1479: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1478: $PREPARSER ./calc input -577. calc.at:1517: input: -stderr: - | (* *) + (*) + (*) -input: -./calc.at:1482: $PREPARSER ./calc input - skipped (calc.at:1517) -1.11-17: error: null divisor -./calc.at:1478: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1479: $PREPARSER ./calc input -578. calc.at:1518: stderr: -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -194699,7 +194776,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: Starting parse Entering state 0 Reading a token @@ -194708,7 +194784,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () @@ -194738,7 +194814,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () @@ -194772,7 +194848,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () @@ -194815,25 +194891,8 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.11-17: error: null divisor -./calc.at:1489: cat stderr - skipped (calc.at:1518) - -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1485: cat stderr Starting parse Entering state 0 Reading a token @@ -194842,7 +194901,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () @@ -194872,7 +194931,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () @@ -194906,7 +194965,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 Next token is token '*' () @@ -194949,8 +195008,9 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1489: $PREPARSER ./calc /dev/null -./calc.at:1478: "$PERL" -pi -e 'use strict; +./calc.at:1479: cat stderr +./calc.at:1482: cat stderr +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -194960,43 +195020,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () - -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: input: | (!!) + (1 2) = 1 - | 1 + 2 * 3 + !- ++ -./calc.at:1485: $PREPARSER ./calc input +./calc.at:1482: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1485: cat stderr ./calc.at:1479: $PREPARSER ./calc input stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" () -stderr: stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1478: cat stderr Starting parse Entering state 0 Reading a token @@ -195011,7 +195043,7 @@ Next token is token '!' () Shifting token '!' () Entering state 15 -Reducing stack 0 by rule 16 (line 120): +Reducing stack 0 by rule 16 (line 107): $1 = token '!' () $2 = token '!' 
() Shifting token error () @@ -195020,7 +195052,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195035,28 +195067,28 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token "number" (2) +syntax error Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195064,7 +195096,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -195074,16 +195106,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) @@ -195093,25 +195125,37 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: -551. calc.at:1478: ok + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1485: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -195126,7 +195170,7 @@ Next token is token '!' () Shifting token '!' 
() Entering state 15 -Reducing stack 0 by rule 16 (line 120): +Reducing stack 0 by rule 16 (line 107): $1 = token '!' () $2 = token '!' () Shifting token error () @@ -195135,7 +195179,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195150,28 +195194,28 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token "number" (2) +syntax error Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195179,7 +195223,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -195189,16 +195233,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) @@ -195208,200 +195252,25 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: cat stderr -579. calc.at:1519: testing Calculator D %locations parse.error=simple ... -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1519: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1482: $PREPARSER ./calc input -./calc.at:1489: cat stderr - -580. 
calc.at:1520: testing Calculator D parse.error=detailed %debug %verbose ... -./calc.at:1520: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1485: cat stderr - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1479: cat stderr -./calc.at:1489: $PREPARSER ./calc input stderr: +579. calc.at:1519: testing Calculator D %locations parse.error=simple ... 
stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) +./calc.at:1489: cat stderr Starting parse Entering state 0 Reading a token @@ -195416,7 +195285,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195431,11 +195300,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -195443,16 +195312,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -195462,16 +195331,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2) $2 = token '+' () $3 = nterm exp (1) @@ -195490,7 +195359,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 
105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195498,7 +195367,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -195530,7 +195399,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195538,7 +195407,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -195552,11 +195421,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -195564,16 +195433,16 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 30 Reading a token Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (1) $2 = token '*' () $3 = nterm exp (2) @@ -195597,7 +195466,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195605,7 +195474,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) @@ -195615,16 +195484,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) @@ -195634,31 +195503,40 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1519: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y input: - | (#) + (#) = 2222 input: -./calc.at:1479: $PREPARSER ./calc input -stderr: - | (- *) + (1 2) = 1 -./calc.at:1485: $PREPARSER ./calc input +578. calc.at:1518: stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1 + 2 * 3 + !- ++ + | 1 + 2 * 3 + !+ ++ +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1479: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -195673,7 +195551,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195688,11 +195566,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -195700,16 +195578,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -195719,16 +195597,16 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2) $2 = token '+' () $3 = nterm exp (1) @@ -195747,7 +195625,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195755,7 +195633,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -195787,7 +195665,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 
(line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195795,7 +195673,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -195809,11 +195687,11 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -195821,16 +195699,16 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) -> $$ = nterm exp (2) Entering state 30 Reading a token Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (1) $2 = token '*' () $3 = nterm exp (2) @@ -195854,7 +195732,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195862,7 +195740,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (3333) $2 = token '+' () $3 = nterm exp (1111) @@ -195872,16 +195750,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (4444) $2 = token '=' () $3 = nterm exp (1) @@ -195891,26 +195769,246 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () + skipped (calc.at:1518) +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: cat stderr +stderr: stderr: + +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1489: $PREPARSER ./calc input + | (- *) + (1 2) = 1 ./calc.at:1482: $PREPARSER ./calc input stderr: -syntax error: invalid character: '#' -syntax error: invalid character: '#' +./calc.at:1485: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
() + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -195923,10 +196021,10 @@ Entering state 2 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 9 -Reducing stack 0 by rule 15 (line 119): +Reducing stack 0 by rule 15 (line 106): $1 = token '-' () $2 = token error () Shifting token error () @@ -195939,7 +196037,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195954,28 +196052,28 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token number (2) -syntax error, unexpected number +Next token is token "number" (2) +syntax error Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token "number" (2) +Error: discarding token "number" (2) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -195983,7 +196081,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -195993,16 +196091,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) @@ -196012,24 +196110,32 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -196095,8 +196201,11 @@ $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1479: cat stderr stderr: + | (!!) + (1 2) = 1 +./calc.at:1485: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -196109,10 +196218,10 @@ Entering state 2 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 9 -Reducing stack 0 by rule 15 (line 119): +Reducing stack 0 by rule 15 (line 106): $1 = token '-' () $2 = token error () Shifting token error () @@ -196125,6 +196234,133 @@ Next token is token ')' () Shifting token ')' () Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token "number" (2) +syntax error +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token "number" (2) +Error: discarding token "number" (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 
69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +579. calc.at:1519: input: + | (#) + (#) = 2222 +./calc.at:1479: $PREPARSER ./calc input + skipped (calc.at:1519) +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () @@ -196213,12 +196449,14 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: cat stderr stderr: -581. calc.at:1521: testing Calculator D parse.error=detailed %debug api.symbol.prefix={SYMB_} api.token.prefix={TOK_} %verbose ... stderr: -579. calc.at:1519: syntax error: invalid character: '#' -syntax error: invalid character: '#' -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -196228,86 +196466,126 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - skipped (calc.at:1519) Starting parse Entering state 0 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1521: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -580. calc.at:1520: skipped (calc.at:1520) -./calc.at:1489: cat stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +syntax error: invalid character: '#' +syntax error: invalid character: '#' +580. calc.at:1520: testing Calculator D parse.error=detailed %debug %verbose ... +./calc.at:1520: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +input: + | (#) + (#) = 2222 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1482: cat stderr ./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -196318,8 +196596,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -./calc.at:1482: "$PERL" -pi -e 'use strict; +stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -196329,12 +196607,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1485: cat stderr - | (!!) + (1 2) = 1 -./calc.at:1489: $PREPARSER ./calc input - -./calc.at:1479: cat stderr -stderr: Starting parse Entering state 0 Reading a token @@ -196342,20 +196614,16 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -196373,21 +196641,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -196412,20 +196671,19 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 + $3 = nterm exp (2222) -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -196447,13 +196705,11 @@ Cleanup: popping token "end of input" () Cleanup: popping nterm input () ./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1479: cat stderr input: - | (* *) + (*) + (*) -./calc.at:1482: cat stderr -./calc.at:1485: $PREPARSER ./calc input -stderr: -input: +./calc.at:1485: cat stderr stderr: + | (* *) + (*) + (*) Starting parse Entering state 0 Reading a token @@ -196461,20 +196717,16 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -196492,21 +196744,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -196531,20 +196774,19 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (2222) +Shifting token "number" (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 + $3 = nterm exp (2222) -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -196565,8 +196807,15 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1482: $PREPARSER ./calc input +input: | (1 + #) = 1111 +input: ./calc.at:1479: $PREPARSER ./calc input +stderr: + | (- *) + (1 2) = 1 +./calc.at:1485: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -196575,7 +196824,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 11 Next token is token '*' () @@ -196589,7 +196838,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -196605,7 +196854,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 11 Next token is token '*' () @@ -196616,7 +196865,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -196624,7 +196873,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -196639,7 +196888,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error Shifting token error () Entering state 11 Next token is token '*' () @@ -196650,7 +196899,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -196658,7 +196907,7 @@ Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -196667,11 +196916,142 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () @@ -196683,13 +197063,6 @@ Cleanup: popping token end of file () Cleanup: popping nterm input () ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: -syntax error: invalid character: '#' -stderr: -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1482: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -196698,7 +197071,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 11 Next token is token '*' () @@ -196712,7 +197085,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -196728,7 +197101,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error Shifting token error () Entering state 11 Next token is token '*' () @@ -196739,7 +197112,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -196747,7 +197120,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -196762,7 +197135,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error Shifting token error () Entering state 11 Next token is token '*' () @@ -196773,7 +197146,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -196781,7 +197154,7 @@ Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -196790,22 +197163,28 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token "end of input" () Cleanup: popping nterm input () stderr: +./calc.at:1489: cat stderr +581. calc.at:1521: testing Calculator D parse.error=detailed %debug api.symbol.prefix={SYMB_} api.token.prefix={TOK_} %verbose ... +syntax error: invalid character: '#' +stderr: +./calc.at:1521: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +input: Starting parse Entering state 0 Reading a token @@ -196813,19 +197192,28 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -196840,19 +197228,28 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -196860,7 +197257,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -196870,43 +197267,54 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (2222) + $3 = nterm exp (1) +error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -581. calc.at:1521: stderr: -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error: invalid character: '#' -./calc.at:1489: "$PERL" -pi -e 'use strict; + | (1 + #) = 1111 +./calc.at:1489: $PREPARSER ./calc input +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -196916,11 +197324,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - skipped (calc.at:1521) -582. calc.at:1523: testing Calculator D %locations parse.lac=full parse.error=detailed ... -./calc.at:1523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -stderr: -./calc.at:1489: cat stderr Starting parse Entering state 0 Reading a token @@ -196928,8 +197331,22 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () @@ -196947,16 +197364,80 @@ -> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +580. calc.at:1520: skipped (calc.at:1520) +stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1479: cat stderr +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () @@ -196972,39 +197453,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -197018,35 +197492,16 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -583. calc.at:1524: testing Calculator D %locations parse.lac=full parse.error=custom ... -./calc.at:1524: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1482: cat stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: ./calc.at:1485: cat stderr + | (# + 1) = 1111 +./calc.at:1479: $PREPARSER ./calc input input: - | (- *) + (1 2) = 1 -./calc.at:1489: $PREPARSER ./calc input -stderr: -./calc.at:1482: "$PERL" -pi -e 'use strict; + | 1 + 2 * 3 + !+ ++ +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -197056,8 +197511,82 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: input: -./calc.at:1479: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (* *) + (*) + (*) +./calc.at:1485: $PREPARSER ./calc input +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +581. calc.at:1521: stderr: +stderr: +./calc.at:1489: cat stderr Starting parse Entering state 0 Reading a token @@ -197065,28 +197594,22 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -197101,92 +197624,94 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1485: $PREPARSER ./calc input -stderr: -stderr: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1521) Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -197194,11 +197719,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -197206,23 +197731,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -197239,12 +197764,16 @@ Next token is token '+' () Shifting token '+' () Entering state 14 -Reducing stack 0 by rule 17 (line 121): +Reducing stack 0 by rule 17 (line 108): $1 = token '!' () $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: +syntax error: invalid character: '#' +stderr: + | 1 + 2 * 3 + !- ++ input: Starting parse Entering state 0 @@ -197253,28 +197782,22 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () -Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -197289,66 +197812,144 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token "number" (2) -syntax error, unexpected number -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () Entering state 11 -Next token is token "number" (2) -Error: discarding token "number" (2) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + | (# + 1) = 1111 +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1489: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token "number" (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -197362,19 +197963,17 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: cat stderr - | (# + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input -stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1) +Shifting token "number" (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) -> $$ = nterm exp (1) Entering state 8 Reading a token @@ -197382,11 +197981,11 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2) +Shifting token "number" (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) -> $$ = nterm exp (2) Entering state 29 Reading a token @@ -197394,23 +197993,23 @@ Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (3) +Shifting token "number" (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ 
= nterm exp (6) Entering state 29 Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -197424,52 +198023,122 @@ Shifting token '!' () Entering state 5 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): $1 = token '!' () - $2 = token '+' () + $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -input: -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + #) = 1111 -./calc.at:1482: $PREPARSER ./calc input -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1485: $PREPARSER ./calc input +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -582. calc.at:1523: Starting parse +Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (2) +Shifting token "number" (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token "number" (3) +Shifting token "number" (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token syntax error: invalid character: '#' Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -197519,9 +198188,24 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -syntax error: invalid character: '#' +./calc.at:1479: cat stderr +./calc.at:1485: cat stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: + | (1 + # + 1) = 1111 +./calc.at:1479: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1485: $PREPARSER ./calc input ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -197532,7 +198216,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -583. calc.at:1524: Starting parse +582. calc.at:1523: testing Calculator D %locations parse.lac=full parse.error=detailed ... +./calc.at:1523: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +stderr: +stderr: +syntax error: invalid character: '#' +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse Entering state 0 Reading a token Next token is token number (1) @@ -197589,22 +198279,97 @@ Shifting token '!' () Entering state 5 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): $1 = token '!' () - $2 = token '-' () + $2 = token '+' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (calc.at:1523) - skipped (calc.at:1524) -584. calc.at:1525: testing Calculator D %locations parse.lac=full parse.error=detailed parse.trace ... 
-./calc.at:1525: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y ./calc.at:1489: cat stderr stderr: +./calc.at:1482: cat stderr +syntax error: invalid character: '#' +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: +input: + | (1 + # + 1) = 1111 +input: +./calc.at:1489: $PREPARSER ./calc input + | (#) + (#) = 2222 +./calc.at:1482: $PREPARSER ./calc input + | 1 + 2 * 3 + !- ++ +./calc.at:1485: $PREPARSER ./calc input +stderr: stderr: +583. calc.at:1524: testing Calculator D %locations parse.lac=full parse.error=custom ... Starting parse Entering state 0 Reading a token @@ -197633,6 +198398,12 @@ Next token is token error () Error: discarding token error () Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -197682,6 +198453,18 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -197747,22 +198530,7 @@ $2 = token '-' () Cleanup: popping token '+' () Cleanup: popping nterm exp (7) -input: - - | (* *) + (*) + (*) -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -stderr: +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -197770,15 +198538,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -197800,12 +198565,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -197819,27 +198584,89 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2222) +Shifting token "number" (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1524: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +./calc.at:1479: cat stderr +stderr: +stderr: +stderr: +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) Reading a token Next token is token ')' () Entering state 11 @@ -197851,20 +198678,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -197878,19 +198717,6 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: cat stderr -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: Starting parse Entering state 0 Reading a token @@ -197898,15 +198724,39 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -197918,81 +198768,283 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (2222) +Shifting token "number" (2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2222) +-> $$ = nterm exp (2222) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +input: + | (1 + 1) / (1 - 1) +./calc.at:1479: $PREPARSER ./calc input +stderr: +582. calc.at:1523: error: null divisor +./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + skipped (calc.at:1523) +stderr: +./calc.at:1489: cat stderr +error: null divisor +./calc.at:1485: cat stderr +./calc.at:1482: cat stderr +input: + + | (1 + 1) / (1 - 1) +./calc.at:1489: $PREPARSER ./calc input +input: +./calc.at:1479: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +583. calc.at:1524: | (#) + (#) = 2222 +./calc.at:1485: $PREPARSER ./calc input + skipped (calc.at:1524) +stderr: +input: +stderr: +./calc.at:1479: cat stderr +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +-> $$ = nterm exp (2) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (0) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -198006,43 +199058,7 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my 
$unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (1 + # + 1) = 1111 -./calc.at:1479: $PREPARSER ./calc input -./calc.at:1485: cat stderr -stderr: -./calc.at:1482: cat stderr -syntax error: invalid character: '#' -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (#) + (#) = 2222 -./calc.at:1485: $PREPARSER ./calc input -input: -stderr: -syntax error: invalid character: '#' - | (# + 1) = 1111 -stderr: -./calc.at:1482: $PREPARSER ./calc input +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -198140,11 +199156,14 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -584. calc.at:1525: ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: cat stderr -585. calc.at:1530: testing Calculator D api.token.constructor %locations parse.error=custom api.value.type=union ... +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1482: $PREPARSER ./calc input + +stderr: +553. calc.at:1479: ok +stderr: stderr: -./calc.at:1530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y Starting parse Entering state 0 Reading a token @@ -198152,55 +199171,101 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () -Error: discarding token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token "number" (1) -Error: discarding token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (2) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (2) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token 
"number" (1) +Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -198214,15 +199279,6 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () - skipped (calc.at:1525) -586. calc.at:1531: testing Calculator D api.token.constructor %locations parse.error=detailed ... -./calc.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1489: $PREPARSER ./calc input -stderr: Starting parse Entering state 0 Reading a token @@ -198320,73 +199376,85 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -stderr: Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token "number" (1) Shifting token "number" (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token "number" (3) -Shifting token "number" (3) +Next token is token '=' () +Shifting token 
'=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) - +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -198394,19 +199462,27 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () Error: discarding token error () Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -198456,8 +199532,7 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: "$PERL" -pi -e 'use strict; +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -198467,73 +199542,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing 
stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1479: cat stderr + +./calc.at:1489: cat stderr ./calc.at:1485: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -198544,9 +199554,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 + 2 * 3 + !- ++ -./calc.at:1489: $PREPARSER ./calc input +584. calc.at:1525: testing Calculator D %locations parse.lac=full parse.error=detailed parse.trace ... +./calc.at:1525: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +563. calc.at:1489: ok +./calc.at:1485: cat stderr ./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -198558,150 +199569,13 @@ }eg ' expout || exit 77 input: -stderr: -./calc.at:1485: cat stderr - | (1 + 1) / (1 - 1) -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1479: $PREPARSER ./calc input -input: -stderr: -error: null divisor -./calc.at:1479: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +585. calc.at:1530: testing Calculator D api.token.constructor %locations parse.error=custom api.value.type=union ... | (1 + #) = 1111 +./calc.at:1530: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y ./calc.at:1485: $PREPARSER ./calc input -./calc.at:1482: cat stderr + stderr: -585. calc.at:1530: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token "number" (2) -Shifting token "number" (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token "number" (3) -Shifting token "number" (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) +./calc.at:1482: cat stderr Starting parse Entering state 0 Reading a token @@ -198779,14 +199653,9 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () - skipped (calc.at:1530) ./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: null divisor -586. calc.at:1531: stderr: - skipped (calc.at:1531) input: - | (1 + # + 1) = 1111 Starting parse Entering state 0 Reading a token @@ -198864,31 +199733,11 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () + | (# + 1) = 1111 ./calc.at:1482: $PREPARSER ./calc input -587. calc.at:1532: testing Calculator D api.push-pull=both ... -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +586. 
calc.at:1531: testing Calculator D api.token.constructor %locations parse.error=detailed ... +./calc.at:1531: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y stderr: - -./calc.at:1479: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -198896,22 +199745,8 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token syntax error: invalid character: '#' Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () @@ -198972,34 +199807,29 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () - ./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: cat stderr stderr: -./calc.at:1479: cat stderr -Starting parse +stderr: +stdout: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +584. calc.at:1525: Starting parse Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token syntax error: invalid character: '#' Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () @@ -199060,24 +199890,34 @@ Entering state 16 Cleanup: popping token "end of input" () Cleanup: popping nterm input () -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1489: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + + skipped (calc.at:1525) ./calc.at:1485: cat stderr input: -553. 
calc.at:1479: ok - | (#) + (#) = 2222 -./calc.at:1489: $PREPARSER ./calc input -input: - | (# + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input + + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1482: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -199088,6 +199928,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1489: $PREPARSER ./calc input +input: +585. calc.at:1530: | (# + 1) = 1111 +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1482: cat stderr + skipped (calc.at:1530) stderr: stderr: Starting parse @@ -199104,104 +199950,6 @@ Next token is token error () Error: discarding token error () Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token Next token is token '+' () Error: discarding token '+' () Reading a token @@ -199257,92 +200005,79 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (2222) -Shifting token "number" (2222) +Next token is token number (7) +Shifting token number (7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2222) --> $$ = nterm exp (2222) + $1 = token number (7) +-> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = 
nterm exp (2222) + $1 = nterm exp (7) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (7) +-> $$ = nterm exp (7) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) + $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -199351,756 +200086,764 @@ -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1482: cat stderr -stderr: - -Starting parse -Entering state 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 Reading a token Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (5) +Shifting token number (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) $2 = token '=' () - $3 = nterm exp 
(1111) --> $$ = nterm exp (1111) + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -588. calc.at:1533: testing Calculator D parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... -587. calc.at:1532: ./calc.at:1533: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y -input: - | (1 + 1) / (1 - 1) -./calc.at:1482: $PREPARSER ./calc input - skipped (calc.at:1532) -stderr: -589. calc.at:1544: testing Calculator Java ... -./calc.at:1544: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -Starting parse -Entering state 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 10 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Entering state 32 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) -> $$ = nterm exp (1) -Entering state 12 +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 
(line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 19 +Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr - -stderr: -./calc.at:1485: cat stderr -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 29 +Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) Entering state 12 Next token is token ')' () Shifting token ')' () Entering state 26 Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = nterm exp (2) + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 19 +Entering state 2 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token '-' () +Shifting 
token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 28 +Entering state 10 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -input: - | (1 + #) = 1111 -./calc.at:1489: $PREPARSER ./calc input -input: -stderr: -590. calc.at:1545: testing Calculator Java parse.error=custom ... 
- | (1 + # + 1) = 1111 -./calc.at:1485: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (-4) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm 
input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1545: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1482: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -588. calc.at:1533: stderr: -stderr: -Starting parse -Entering state 0 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +-> $$ = nterm exp (-1) +Entering state 28 Reading a token Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (2) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - 
$1 = nterm exp (1111) + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () - skipped (calc.at:1533) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 -Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (256) +Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (256) +-> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp 
(1111) + $3 = nterm exp (256) +-> $$ = nterm exp (256) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1482: cat stderr -Starting parse -Entering state 0 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (1111) +-> $$ = nterm exp (4) Entering state 8 Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token number (64) +Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (64) +-> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) +Reducing 
stack 0 by rule 6 (line 80): + $1 = nterm exp (64) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (64) +-> $$ = nterm exp (64) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () -589. calc.at:1544: -557. calc.at:1482: stderr: - ok -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - skipped (calc.at:1544) -stdout: -./calc.at:1487: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -591. calc.at:1546: testing Calculator Java parse.error=detailed ... -./calc.at:1489: cat stderr -./calc.at:1546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1485: cat stderr -input: -input: - -590. 
calc.at:1545: | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1487: $PREPARSER ./calc input - | (# + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input - skipped (calc.at:1545) +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -input: Starting parse Entering state 0 Reading a token @@ -200118,15 +200861,15 @@ Next token is token '+' () Error: discarding token '+' () Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -200137,16 +200880,16 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -200155,32 +200898,32 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of file () Cleanup: popping nterm input () - | (1 + 1) / (1 - 1) -./calc.at:1485: $PREPARSER ./calc input -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: +587. calc.at:1532: testing Calculator D api.push-pull=both ... 
+./calc.at:1532: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y Starting parse Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -200192,7 +200935,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -200204,20 +200947,20 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -200230,13 +200973,13 @@ Next token is token number (7) Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -200245,12 +200988,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 @@ -200258,7 +201001,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -200270,7 +201013,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -200286,26 +201029,26 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -200322,19 +201065,19 @@ Next token is token number (5) Shifting token number (5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 
79): $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -200343,12 +201086,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -200357,11 +201100,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -200374,7 +201117,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 @@ -200386,20 +201129,20 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -200415,19 +201158,19 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -200436,12 +201179,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -200458,13 +201201,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -200472,7 +201215,7 @@ Next token is token ')' 
() Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -200486,13 +201229,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -200505,13 +201248,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -200520,12 +201263,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -200534,11 +201277,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -200559,25 +201302,25 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -200593,19 +201336,19 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -200614,12 +201357,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () 
$2 = nterm line () -> $$ = nterm input () @@ -200628,11 +201371,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -200641,7 +201384,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -200653,13 +201396,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -200672,13 +201415,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -200695,19 +201438,19 @@ Next token is token number (4) Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): +Reducing stack 0 by rule 11 (line 102): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -200716,12 +201459,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -200730,7 +201473,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -200746,7 +201489,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 @@ -200758,13 +201501,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -200773,7 +201516,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by 
rule 13 (line 104): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -200781,7 +201524,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -200794,13 +201537,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -200809,12 +201552,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -200823,11 +201566,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 87): +Reducing stack 0 by rule 3 (line 74): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -200836,7 +201579,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 @@ -200848,7 +201591,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 @@ -200860,20 +201603,20 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -200886,13 +201629,13 @@ Next token is token number (256) Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -200901,12 +201644,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -200919,7 +201662,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 
92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 @@ -200931,13 +201674,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -200946,7 +201689,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -200960,13 +201703,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -200979,13 +201722,13 @@ Next token is token number (64) Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -200994,26 +201737,142 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 83): +Reducing stack 0 by rule 2 (line 70): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () + | (1 + # + 1) = 1111 + +./calc.at:1482: $PREPARSER ./calc input +input: stderr: + | 1 2 +./calc.at:1489: $PREPARSER ./calc input +586. 
calc.at:1531: Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token "number" (1) +Error: discarding token "number" (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + skipped (calc.at:1531) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - Starting parse Entering state 0 Reading a token @@ -201021,8 +201880,22 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token syntax error: invalid character: '#' Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 Next token is token error () @@ -201034,1057 +201907,2831 @@ Next token is token "number" (1) Error: discarding token "number" (1) Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token "number" (1111) +Shifting token "number" (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () +stderr: +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1485: cat stderr + +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +588. calc.at:1533: testing Calculator D parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... +./calc.at:1533: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o calc.d calc.y +input: +./calc.at:1482: cat stderr + | (1 + # + 1) = 1111 +./calc.at:1485: $PREPARSER ./calc input +587. calc.at:1532: ./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1532) +stderr: +input: +589. calc.at:1544: testing Calculator Java ... 
+./calc.at:1544: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + | (1 + 1) / (1 - 1) +./calc.at:1482: $PREPARSER ./calc input +./calc.at:1489: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + +./calc.at:1482: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +588. 
calc.at:1533: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token "number" (1) +Shifting token "number" (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" () +Entering state 16 +Cleanup: popping token "end of input" () +Cleanup: popping nterm input () + skipped (calc.at:1533) +input: + | 1//2 +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: + +590. calc.at:1545: testing Calculator Java parse.error=custom ... 
+./calc.at:1545: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1485: cat stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1482: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () + | (1 + 1) / (1 - 1) +./calc.at:1485: $PREPARSER ./calc input +./calc.at:1482: cat stderr +589. calc.at:1544: 591. calc.at:1546: testing Calculator Java parse.error=detailed ... 
+stderr: + skipped (calc.at:1544) +./calc.at:1546: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +557. calc.at:1482: ok +./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 104): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 117): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 106): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () + +590. calc.at:1545: ./calc.at:1489: cat stderr + + skipped (calc.at:1545) +./calc.at:1485: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +592. calc.at:1547: testing Calculator Java parse.error=verbose ... 
+./calc.at:1547: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +input: + +./calc.at:1485: cat stderr + | error +./calc.at:1489: $PREPARSER ./calc input +stderr: +559. calc.at:1485: ok +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +591. calc.at:1546: skipped (calc.at:1546) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () + + +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +593. calc.at:1548: testing Calculator Java %locations parse.error=custom ... +./calc.at:1548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +594. calc.at:1549: testing Calculator Java %locations parse.error=detailed ... +./calc.at:1489: cat stderr +./calc.at:1549: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +input: +595. calc.at:1550: testing Calculator Java %locations parse.error=verbose ... +./calc.at:1550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + | 1 = 2 = 3 +./calc.at:1489: $PREPARSER ./calc input +stderr: +592. calc.at:1547: Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (calc.at:1547) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +596. 
calc.at:1551: testing Calculator Java parse.trace parse.error=verbose ... +./calc.at:1551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +597. calc.at:1552: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} ... +593. calc.at:1548: ./calc.at:1552: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + + skipped (calc.at:1548) +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: cat stderr + +596. calc.at:1551: input: +595. calc.at:1550: | + | +1 +./calc.at:1489: $PREPARSER ./calc input +594. calc.at:1549: skipped (calc.at:1551) +stderr: + skipped (calc.at:1550) + skipped (calc.at:1549) +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () + + + +597. calc.at:1552: skipped (calc.at:1552) +599. calc.at:1555: testing Calculator Java api.push-pull=both parse.error=detailed %locations ... +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y + +598. calc.at:1554: testing Calculator Java api.push-pull=both ... +./calc.at:1489: cat stderr +600. calc.at:1556: testing Calculator Java parse.trace parse.error=custom %locations %lex-param {InputStream is} api.push-pull=both ... +./calc.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1556: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1489: $PREPARSER ./calc /dev/null +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1491: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +stderr: +602. calc.at:1560: testing Calculator Java parse.trace parse.error=custom %locations parse.lac=full ... +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () +./calc.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1491: $PREPARSER ./calc input +601. calc.at:1557: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} api.push-pull=both ... +./calc.at:1557: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +599. calc.at:1555: Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) +Entering state 27 +Reading a token +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) +Entering 
state 8 +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token 
+Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = 
nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 
= nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token 
"number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering 
state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (12.12-13.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = 
token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1485: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) + skipped (calc.at:1555) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +600. calc.at:1556: ./calc.at:1489: cat stderr + skipped (calc.at:1556) +598. calc.at:1554: stderr: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (7) -Shifting token number (7) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (7) --> $$ = nterm exp (7) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) +Next token is token '\n' (1.14-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 
7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) Entering state 10 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) Entering state 2 Reading a token -Next token is token number (5) -Shifting token number 
(5) +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (5) --> $$ = nterm exp (5) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) Entering state 10 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = 
nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) Entering state 10 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) Entering state 26 -Reducing stack 0 by 
rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) Entering state 6 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.2: ) 
+Shifting token '-' (7.2: ) Entering state 2 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) Entering state 10 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) Entering state 2 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token 
'\n' () +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) Entering state 19 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) Entering state 28 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) Entering state 8 -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) Entering state 28 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) Entering state 2 Reading a token -Next token is token number (4) -Shifting token number (4) +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (4) --> $$ = nterm exp (4) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) Entering state 10 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 115): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + 
$1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) Entering state 19 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) Entering state 19 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) Entering state 28 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) Entering state 28 Reading a token -Next token is token '=' () 
-Reducing stack 0 by rule 8 (line 104): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) Entering state 32 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) Entering state 23 
Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) Entering state 18 Reading a token -Next token is token number (256) -Shifting token number (256) +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (256) --> $$ = nterm exp (256) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) Entering state 12 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) Entering state 23 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 -Reducing stack 0 
by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) Entering state 32 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) Entering state 23 Reading a token -Next token is token number (3) -Shifting token number (3) +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) Entering state 32 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 116): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (64) --> $$ = nterm exp (64) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) Entering state 17 -Reducing stack 0 by rule 2 (line 83): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) + skipped (calc.at:1554) +input: +603. calc.at:1561: testing Calculator Java parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... +./calc.at:1561: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y input: + | 1 2 +./calc.at:1491: $PREPARSER ./calc input + + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1489: $PREPARSER ./calc input +stderr: + Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +stderr: + +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () @@ -202093,168 +204740,96 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 28 +Entering state 29 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) - $2 = token '-' () + $2 = token '+' () $3 = nterm exp (1) --> $$ = nterm exp (0) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 117): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor -> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () - | 1 2 -./calc.at:1487: $PREPARSER ./calc input -592. calc.at:1547: testing Calculator Java parse.error=verbose ... -./calc.at:1547: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -stderr: -593. calc.at:1548: testing Calculator Java %locations parse.error=custom ... -./calc.at:1548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: cat stderr -stderr: -Starting parse -Entering state 0 +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1485: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -591. calc.at:1546: input: -594. calc.at:1549: testing Calculator Java %locations parse.error=detailed ... -./calc.at:1485: cat stderr - | (1 + # + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input - skipped (calc.at:1546) -./calc.at:1549: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -stderr: -./calc.at:1487: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Entering state 29 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) Entering state 12 -Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Error: popping token '+' () -Error: popping nterm exp (1) +Error: popping nterm exp (3) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '+' () -Error: discarding token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -202266,86 +204841,62 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) -Entering state 27 +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -559. calc.at:1485: ok -input: - | 1//2 -./calc.at:1487: $PREPARSER ./calc input -stderr: -595. calc.at:1550: testing Calculator Java %locations parse.error=verbose ... 
- -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1) -Error: discarding token "number" (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 @@ -202357,32 +204908,40 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 8 +Entering state 29 Reading a token Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1111) -Shifting token "number" (1111) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1111) --> $$ = nterm exp (1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -202392,89 +204951,64 @@ Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1550: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse +stderr: +stderr: +stdout: +602. calc.at:1560: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -592. calc.at:1547: skipped (calc.at:1547) -stderr: - +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 Reading a token -Next token is token '/' () -syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1489: cat stderr -593. calc.at:1548: skipped (calc.at:1548) -./calc.at:1487: cat stderr -input: - | (1 + 1) / (1 - 1) -./calc.at:1489: $PREPARSER ./calc input -595. 
calc.at:1550: stderr: - skipped (calc.at:1550) -input: - -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token @@ -202482,212 +205016,199 @@ Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token ')' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) -> $$ = nterm exp (2) Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) -Entering state 28 +Entering state 29 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2) + $2 = token '+' () $3 = nterm exp (1) --> $$ = nterm exp (0) +-> $$ = nterm exp (3) Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token ')' () +syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" () -Entering state 16 -Cleanup: popping token "end of input" () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -594. calc.at:1549: 596. calc.at:1551: testing Calculator Java parse.trace parse.error=verbose ... - | error -./calc.at:1487: $PREPARSER ./calc input - skipped (calc.at:1549) -./calc.at:1551: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y - -stderr: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 12 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (1) -Shifting token "number" (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 29 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (2) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (2) -Entering state 8 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) + $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1) -Shifting token "number" (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1) --> $$ = nterm exp (1) -Entering state 28 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 30 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Next token is token '*' () +Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (0) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -202697,29 +205218,24 @@ Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" () +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" () +Cleanup: popping token end of input () Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token invalid token () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +601. calc.at:1557: ./calc.at:1487: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token invalid token () -597. calc.at:1552: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} ... -./calc.at:1552: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1489: "$PERL" -pi -e 'use strict; + skipped (calc.at:1560) + skipped (calc.at:1557) +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -202729,98 +205245,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -598. calc.at:1554: testing Calculator Java api.push-pull=both ... -./calc.at:1554: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1489: cat stderr -./calc.at:1487: cat stderr -563. calc.at:1489: ok -599. 
calc.at:1555: testing Calculator Java api.push-pull=both parse.error=detailed %locations ... -./calc.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y input: - | 1 = 2 = 3 -./calc.at:1487: $PREPARSER ./calc input -596. calc.at:1551: stderr: -600. calc.at:1556: testing Calculator Java parse.trace parse.error=custom %locations %lex-param {InputStream is} api.push-pull=both ... - skipped (calc.at:1551) -stdout: -stderr: -./calc.at:1556: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -./calc.at:1486: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc - -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -601. calc.at:1557: testing Calculator Java parse.trace parse.error=verbose %locations %lex-param {InputStream is} api.push-pull=both ... -./calc.at:1557: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () - -597. calc.at:1552: input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -202834,17 +205260,34 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 - skipped (calc.at:1552) -598. calc.at:1554: ./calc.at:1486: $PREPARSER ./calc input - skipped (calc.at:1554) +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1491: cat stderr stderr: + +stderr: +stdout: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +606. torture.at:270: testing State number type: 128 states ... 
+./torture.at:270: ruby $abs_top_srcdir/tests/linear 128 >input.y || exit 77 +605. torture.at:216: testing Big horizontal ... +./torture.at:230: "$PERL" -w ./gengram.pl 1000 || exit 77 +input: Starting parse Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -202856,7 +205299,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -202868,20 +205311,20 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -202894,13 +205337,13 @@ Next token is token number (7) Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -202909,12 +205352,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 @@ -202922,7 +205365,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -202934,7 +205377,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -202950,26 +205393,26 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -202986,19 +205429,19 @@ Next token is token number (5) Shifting token number (5) Entering state 1 -Reducing stack 0 by rule 5 (line 
79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -203007,12 +205450,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203021,11 +205464,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203038,7 +205481,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 @@ -203050,20 +205493,20 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -203079,19 +205522,19 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -203100,12 +205543,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203122,13 +205565,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ 
-203136,7 +205579,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -203150,13 +205593,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -203169,13 +205612,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -203184,12 +205627,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203198,11 +205641,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203223,25 +205666,25 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -203257,19 +205700,19 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -203278,12 +205721,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing 
stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203292,11 +205735,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203305,7 +205748,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -203317,13 +205760,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -203336,13 +205779,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -203359,19 +205802,19 @@ Next token is token number (4) Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -203380,12 +205823,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203394,7 +205837,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -203410,7 +205853,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 @@ -203422,13 +205865,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -203437,7 +205880,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing 
stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -203445,7 +205888,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -203458,13 +205901,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -203473,12 +205916,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203487,11 +205930,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203500,7 +205943,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 @@ -203512,7 +205955,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 @@ -203524,20 +205967,20 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -203550,13 +205993,13 @@ Next token is token number (256) Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -203565,12 +206008,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203583,7 +206026,7 @@ Next token is token number (2) Shifting token number (2) 
Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 @@ -203595,13 +206038,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -203610,7 +206053,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -203624,13 +206067,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -203643,13 +206086,13 @@ Next token is token number (64) Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -203658,34 +206101,72 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1487: cat stderr +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc + | 1//2 +./calc.at:1491: $PREPARSER ./calc input stderr: +603. calc.at:1561: stderr: +./calc.at:1489: cat stderr +--- /dev/null 2023-04-20 00:26:40.000000000 +1400 ++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/stderr 2023-04-20 07:48:35.814940287 +1400 +@@ -0,0 +1 @@ ++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/test-source: line 14: ruby: command not found +606. 
torture.at:270: input: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) + skipped (calc.at:1561) Starting parse Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -203697,7 +206178,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -203709,20 +206190,20 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 30 Reading a token Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (3) -> $$ = nterm exp (6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (6) @@ -203735,13 +206216,13 @@ Next token is token number (7) Shifting token number (7) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (7) -> $$ = nterm exp (7) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (7) $2 = token '=' () $3 = nterm exp (7) @@ -203750,12 +206231,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (7) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 @@ -203763,7 +206244,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -203775,7 +206256,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 29 @@ -203791,26 +206272,26 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (3) -> $$ = nterm exp (-3) Entering state 30 Next token is token '=' () -Reducing stack 0 by rule 
9 (line 92): +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (2) $2 = token '*' () $3 = nterm exp (-3) -> $$ = nterm exp (-6) Entering state 29 Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (-6) @@ -203827,19 +206308,19 @@ Next token is token number (5) Shifting token number (5) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (5) -> $$ = nterm exp (5) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (5) -> $$ = nterm exp (-5) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-5) $2 = token '=' () $3 = nterm exp (-5) @@ -203848,12 +206329,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-5) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203862,11 +206343,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -203879,7 +206360,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 @@ -203891,20 +206372,20 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (1) $2 = token '^' () $3 = nterm exp (2) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -203920,19 +206401,19 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -203941,12 +206422,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = 
nterm input () @@ -203963,13 +206444,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -203977,7 +206458,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -203991,13 +206472,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (-1) $2 = token '^' () $3 = nterm exp (2) @@ -204010,13 +206491,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1) $2 = token '=' () $3 = nterm exp (1) @@ -204025,12 +206506,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -204039,11 +206520,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -204064,25 +206545,25 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (-1) -> $$ = nterm exp (1) Entering state 10 Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) @@ -204098,19 +206579,19 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (1) -> $$ = nterm exp (-1) Entering state 27 Next token is token '\n' 
() -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-1) $2 = token '=' () $3 = nterm exp (-1) @@ -204119,12 +206600,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-1) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -204133,11 +206614,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -204146,7 +206627,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -204158,13 +206639,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 28 Reading a token Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (2) @@ -204177,13 +206658,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (-1) $2 = token '-' () $3 = nterm exp (3) @@ -204200,19 +206681,19 @@ Next token is token number (4) Shifting token number (4) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (4) -> $$ = nterm exp (4) Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): +Reducing stack 0 by rule 11 (line 115): $1 = token '-' () $2 = nterm exp (4) -> $$ = nterm exp (-4) Entering state 27 Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (-4) $2 = token '=' () $3 = nterm exp (-4) @@ -204221,12 +206702,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (-4) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -204235,7 +206716,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 @@ -204251,7 +206732,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 @@ -204263,13 +206744,13 @@ Next token is token number (3) Shifting token number (3) 
Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (2) $2 = token '-' () $3 = nterm exp (3) @@ -204278,7 +206759,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (-1) $3 = token ')' () @@ -204286,7 +206767,7 @@ Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (-1) @@ -204299,13 +206780,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2) $2 = token '=' () $3 = nterm exp (2) @@ -204314,12 +206795,12 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -204328,11 +206809,11 @@ Next token is token '\n' () Shifting token '\n' () Entering state 3 -Reducing stack 0 by rule 3 (line 74): +Reducing stack 0 by rule 3 (line 87): $1 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -204341,7 +206822,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 8 @@ -204353,7 +206834,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 @@ -204365,20 +206846,20 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (3) -> $$ = nterm exp (8) Entering state 32 Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (8) @@ -204391,13 +206872,13 @@ Next token is token number (256) Shifting token number (256) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (256) -> $$ = nterm exp (256) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (256) $2 = token '=' () $3 = nterm exp (256) @@ -204406,12 +206887,12 @@ Next token 
is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (256) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () @@ -204424,7 +206905,7 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 @@ -204436,13 +206917,13 @@ Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) Entering state 32 Reading a token Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (2) $2 = token '^' () $3 = nterm exp (2) @@ -204451,7 +206932,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (4) $3 = token ')' () @@ -204465,13 +206946,13 @@ Next token is token number (3) Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (3) -> $$ = nterm exp (3) Entering state 32 Reading a token Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): +Reducing stack 0 by rule 12 (line 116): $1 = nterm exp (4) $2 = token '^' () $3 = nterm exp (3) @@ -204484,13 +206965,13 @@ Next token is token number (64) Shifting token number (64) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (64) -> $$ = nterm exp (64) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (64) $2 = token '=' () $3 = nterm exp (64) @@ -204499,157 +206980,66 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (64) $2 = token '\n' () -> $$ = nterm line () Entering state 17 -Reducing stack 0 by rule 2 (line 70): +Reducing stack 0 by rule 2 (line 83): $1 = nterm input () $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -input: - | 1 2 + skipped (torture.at:270) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 ./calc.at:1486: $PREPARSER ./calc input input: stderr: -599. 
calc.at:1555: Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | - | +1 + | 1 2 ./calc.at:1487: $PREPARSER ./calc input - skipped (calc.at:1555) -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () +input: Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -602. calc.at:1560: testing Calculator Java parse.trace parse.error=custom %locations parse.lac=full ... - -./calc.at:1560: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y -stderr: -604. torture.at:132: testing Big triangle ... -./torture.at:138: "$PERL" -w ./gengram.pl 200 || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 87): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -600. calc.at:1556: 601. calc.at:1557: skipped (calc.at:1556) - skipped (calc.at:1557) -603. calc.at:1561: testing Calculator Java parse.trace parse.error=custom %locations api.push-pull=both parse.lac=full ... -./calc.at:1561: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated -o Calc.java Calc.y - -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1487: cat stderr - -./calc.at:1487: $PREPARSER ./calc /dev/null -./calc.at:1486: cat stderr -605. torture.at:216: testing Big horizontal ... 
-./torture.at:230: "$PERL" -w ./gengram.pl 1000 || exit 77 -stderr: -606. torture.at:270: testing State number type: 128 states ... -./torture.at:270: ruby $abs_top_srcdir/tests/linear 128 >input.y || exit 77 -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token end of file () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ---- /dev/null 2024-05-20 04:47:26.000000000 -1200 -+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/stderr 2024-05-21 12:00:17.257227659 -1200 -@@ -0,0 +1 @@ -+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/606/test-source: line 14: ruby: command not found -606. torture.at:270: stderr: -Starting parse -Entering state 0 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Now at end of input. -syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) -Cleanup: discarding lookahead token end of file () - skipped (torture.at:270) -input: - | 1//2 -./calc.at:1486: $PREPARSER ./calc input +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) + | (!!) + (1 2) = 1 +./calc.at:1489: $PREPARSER ./calc input stdout: -stderr: %code top { /* -*- c -*- */ /* Adjust to the compiler. We used to do it here, but each time we add a new line, @@ -205786,6 +208176,991 @@ (void) argv; return yyparse (); } + +stderr: +stderr: + +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (7) +Shifting token number (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (7) +-> $$ = nterm exp (7) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm 
line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (5) +Shifting token number (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing 
stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a 
token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 +Reading a token +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (4) +Shifting token number (4) +Entering state 1 
+Reducing stack 0 by rule 5 (line 79): + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 28 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) + $2 = token '=' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 +Reading a token +Next token is token '^' () 
+Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (256) +Shifting token number (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (256) +-> $$ = nterm exp (256) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 12 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (4) + $3 = token ')' () +-> $$ = nterm exp (4) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (64) +Shifting token number (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (64) +-> $$ = nterm exp (64) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) 
+-> $$ = nterm exp (64) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +604. torture.at:132: testing Big triangle ... +./torture.at:138: "$PERL" -w ./gengram.pl 200 || exit 77 +stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./torture.at:236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y Starting parse Entering state 0 Reading a token @@ -205797,19 +209172,322 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (7) +Shifting token number (7) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (7) +-> $$ = nterm exp (7) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7) + $2 = token '=' () + $3 = nterm exp (7) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering 
state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 10 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (3) +-> $$ = nterm exp (-3) +Entering state 30 +Next token is token '=' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (-3) +-> $$ = nterm exp (-6) +Entering state 29 +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (-6) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (5) +Shifting token number (5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (5) +-> $$ = nterm exp (5) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (5) +-> $$ = nterm exp (-5) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-5) + $2 = token '=' () + $3 = nterm exp (-5) +-> $$ = nterm exp (-5) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-5) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) 
+Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 10 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (-1) + $3 = token ')' () +-> $$ = nterm exp (-1) +Entering state 8 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (-1) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token number (1) Shifting token number (1) @@ -205817,447 +209495,539 @@ Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Entering state 27 Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -./torture.at:236: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - -607. torture.at:271: testing State number type: 129 states ... -./torture.at:271: ruby $abs_top_srcdir/tests/linear 129 >input.y || exit 77 -./calc.at:1487: cat stderr -602. 
calc.at:1560: skipped (calc.at:1560) ---- /dev/null 2024-05-20 04:47:26.000000000 -1200 -+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/stderr 2024-05-21 12:00:17.329227659 -1200 -@@ -0,0 +1 @@ -+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/test-source: line 14: ruby: command not found -input: -607. torture.at:271: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1487: $PREPARSER ./calc input - -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - skipped (torture.at:271) -603. calc.at:1561: stderr: -Starting parse -Entering state 0 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1) + $2 = token '=' () + $3 = nterm exp (1) +-> $$ = nterm exp (1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 10 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (-1) +-> $$ = nterm exp (1) +Entering state 10 +Next token is token '=' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): 
+Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 29 +Entering state 10 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (1) +-> $$ = nterm exp (-1) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-1) + $2 = token '=' () + $3 = nterm exp (-1) +-> $$ = nterm exp (-1) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-1) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 29 +Entering state 8 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 28 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '-' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (2) +-> $$ = nterm exp (-1) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token -Next token is token '*' () 
-Error: discarding token '*' () +Next token is token '=' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (-1) + $2 = token '-' () + $3 = nterm exp (3) +-> $$ = nterm exp (-4) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (4) +Shifting token number (4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (4) +-> $$ = nterm exp (4) +Entering state 10 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) +Next token is token '\n' () +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' () + $2 = nterm exp (4) +-> $$ = nterm exp (-4) +Entering state 27 +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (-4) + $2 = token '=' () + $3 = nterm exp (-4) +-> $$ = nterm exp (-4) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (-4) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 12 +Entering state 8 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '-' () +Shifting token '-' () +Entering state 19 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 28 Reading a token Next token is token ')' () -Entering state 11 +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (2) + $2 = token '-' () + $3 = nterm exp (3) +-> 
$$ = nterm exp (-1) +Entering state 12 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (-1) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +-> $$ = nterm exp (-1) +Entering state 28 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (-1) +-> $$ = nterm exp (2) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2) $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $3 = nterm exp (2) +-> $$ = nterm exp (2) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: cat stderr - skipped (calc.at:1561) - -608. torture.at:272: testing State number type: 256 states ... -./torture.at:272: ruby $abs_top_srcdir/tests/linear 256 >input.y || exit 77 -input: -stderr: - | error -./calc.at:1486: $PREPARSER ./calc input ---- /dev/null 2024-05-20 04:47:26.000000000 -1200 -+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/stderr 2024-05-21 12:00:17.409227659 -1200 -@@ -0,0 +1 @@ -+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/test-source: line 14: ruby: command not found -stderr: -608. 
torture.at:272: Starting parse -Entering state 0 +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 8 Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 32 +Reading a token +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (8) +Entering state 32 +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (2) + $2 = token '^' () + $3 = nterm exp (8) +-> $$ = nterm exp (256) Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token number (256) +Shifting token number (256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (256) +-> $$ = nterm exp (256) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (256) + $2 = token '=' () + $3 = nterm exp (256) +-> $$ = nterm exp (256) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (256) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '^' () +Shifting token '^' () +Entering state 23 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm 
exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (2) -> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 +Entering state 32 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): +Next token is token ')' () +Reducing stack 0 by rule 12 (line 103): $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) + $2 = token '^' () + $3 = nterm exp (2) +-> $$ = nterm exp (4) Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () - $2 = token error () + $2 = nterm exp (4) $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +-> $$ = nterm exp (4) +Entering state 8 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token '^' () +Shifting token '^' () +Entering state 23 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 32 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4) + $2 = token '^' () + $3 = nterm exp (3) +-> $$ = nterm exp (64) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (64) +Shifting token number (64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (64) +-> $$ = nterm exp (64) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (64) + $2 = token '=' () + $3 = nterm exp (64) +-> $$ = nterm exp (64) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (64) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input () + $2 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +Starting parse +Entering state 0 Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' 
() +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) Entering state 8 +Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 @@ -206269,49 +210039,25 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Error: popping token '*' () -Error: popping nterm exp (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -206319,11 +210065,11 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (3333) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (4444) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '=' () Shifting token '=' () @@ -206332,69 +210078,139 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (4444) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (4444) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) $2 = token '\n' () -> 
$$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () - +input: +./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] ['\n']) +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) + | 1 2 +./calc.at:1486: $PREPARSER ./calc input +608. torture.at:272: testing State number type: 256 states ... +./torture.at:272: ruby $abs_top_srcdir/tests/linear 256 >input.y || exit 77 +607. torture.at:271: testing State number type: 129 states ... +./torture.at:271: ruby $abs_top_srcdir/tests/linear 129 >input.y || exit 77 +stderr: +--- /dev/null 2023-04-20 00:26:40.000000000 +1400 ++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/stderr 2023-04-20 07:48:35.986933654 +1400 +@@ -0,0 +1 @@ ++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/608/test-source: line 14: ruby: command not found +--- /dev/null 2023-04-20 00:26:40.000000000 +1400 ++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/stderr 2023-04-20 07:48:35.998933192 +1400 +@@ -0,0 +1 @@ ++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/607/test-source: line 14: ruby: command not found +607. torture.at:271: 608. torture.at:272: Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (torture.at:271) +input: skipped (torture.at:272) stderr: -609. torture.at:273: testing State number type: 257 states ... -./torture.at:273: ruby $abs_top_srcdir/tests/linear 257 >input.y || exit 77 + | error +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1487: cat stderr +./calc.at:1491: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () ---- /dev/null 2024-05-20 04:47:26.000000000 -1200 -+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/stderr 2024-05-21 12:00:17.473227659 -1200 -@@ -0,0 +1 @@ -+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/test-source: line 14: ruby: command not found -./calc.at:1487: cat stderr -610. 
torture.at:274: testing State number type: 32768 states ... -./torture.at:274: ruby $abs_top_srcdir/tests/linear 32768 >input.y || exit 77 -609. torture.at:273: --- /dev/null 2024-05-20 04:47:26.000000000 -1200 -+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/stderr 2024-05-21 12:00:17.481227659 -1200 -@@ -0,0 +1 @@ -+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/test-source: line 14: ruby: command not found +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Cleanup: discarding lookahead token number (2) +stderr: + +./calc.at:1489: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) -610. torture.at:274: skipped (torture.at:273) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +609. torture.at:273: testing State number type: 257 states ... +./torture.at:273: ruby $abs_top_srcdir/tests/linear 257 >input.y || exit 77 input: + | (- *) + (1 2) = 1 +./calc.at:1489: $PREPARSER ./calc input +610. torture.at:274: testing State number type: 32768 states ... +./torture.at:274: ruby $abs_top_srcdir/tests/linear 32768 >input.y || exit 77 + | 1//2 +./calc.at:1487: $PREPARSER ./calc input +stderr: ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -206405,19 +210221,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - skipped (torture.at:274) - -611. torture.at:275: testing State number type: 65536 states ... -./torture.at:275: ruby $abs_top_srcdir/tests/linear 65536 >input.y || exit 77 - | (!!) + (1 2) = 1 -./calc.at:1486: cat stderr -./calc.at:1487: $PREPARSER ./calc input ---- /dev/null 2024-05-20 04:47:26.000000000 -1200 -+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/stderr 2024-05-21 12:00:17.537227659 -1200 +--- /dev/null 2023-04-20 00:26:40.000000000 +1400 ++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/stderr 2023-04-20 07:48:36.130928102 +1400 @@ -0,0 +1 @@ -+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/test-source: line 14: ruby: command not found -611. torture.at:275: stderr: - skipped (torture.at:275) ++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/609/test-source: line 14: ruby: command not found +stderr: Starting parse Entering state 0 Reading a token @@ -206425,23 +210233,28 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -206459,13 +210272,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -206477,7 +210290,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -206485,7 +210298,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -206498,13 +210311,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) @@ -206514,29 +210327,59 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () +stderr: +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +609. torture.at:273: --- /dev/null 2023-04-20 00:26:40.000000000 +1400 ++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/stderr 2023-04-20 07:48:36.134927948 +1400 +@@ -0,0 +1 @@ ++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/610/test-source: line 14: ruby: command not found +610. 
torture.at:274: Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () ./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 = 2 = 3 -./calc.at:1486: $PREPARSER ./calc input - + skipped (torture.at:273) +./calc.at:1486: cat stderr + skipped (torture.at:274) +stderr: stderr: -612. torture.at:276: testing State number type: 65537 states ... -./torture.at:276: ruby $abs_top_srcdir/tests/linear 65537 >input.y || exit 77 Starting parse Entering state 0 Reading a token @@ -206544,23 +210387,28 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 120): - $1 = token '!' () - $2 = token '!' () +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () Shifting token error () Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () +Entering state 11 +Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -206578,13 +210426,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +syntax error, unexpected number Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -206596,7 +210444,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -206604,7 +210452,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -206617,13 +210465,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) @@ -206633,62 
+210481,81 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () - ---- /dev/null 2024-05-20 04:47:26.000000000 -1200 -+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/stderr 2024-05-21 12:00:17.601227659 -1200 -@@ -0,0 +1 @@ -+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/test-source: line 14: ruby: command not found -stderr: -612. torture.at:276: Starting parse +Starting parse Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '/' () +Shifting token '/' () +Entering state 22 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token '/' () +syntax error on token ['/'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: + | 1//2 + +./calc.at:1486: $PREPARSER ./calc input + +./calc.at:1491: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' () Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () +Cleanup: discarding lookahead token '/' () ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (torture.at:276) -613. torture.at:385: testing Many lookahead tokens ... -./torture.at:387: "$PERL" -w ./gengram.pl 1000 || exit 77 -./calc.at:1487: cat stderr stderr: +input: Starting parse Entering state 0 Reading a token @@ -206700,2096 +210567,117 @@ -> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '/' () +syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '/' () +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1487: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) Entering state 18 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 27 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -stderr: +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1489: cat stderr input: -stdout: -615. torture.at:531: testing Exploding the Stack Size with Malloc ... - | (- *) + (1 2) = 1 -stdout: - + | error +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1487: $PREPARSER ./calc input -stderr: -stdout: -./torture.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./calc.at:1491: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -614. torture.at:485: testing Exploding the Stack Size with Alloca ... -%code top { /* -*- c -*- */ -/* Adjust to the compiler. - We used to do it here, but each time we add a new line, - we have to adjust all the line numbers in error messages. - It's simpler to use a constant include to a varying file. 
*/ -#include -} - -%define parse.error verbose -%debug -%{ -#include -#include -#include -#define MAX 200 -static int yylex (void); -#include - -/* !POSIX */ static void yyerror (const char *msg); -%} -%union -{ - int val; -}; - -%token END "end" -%type exp input -%token t1 1 "1" -%token t2 2 "2" -%token t3 3 "3" -%token t4 4 "4" -%token t5 5 "5" -%token t6 6 "6" -%token t7 7 "7" -%token t8 8 "8" -%token t9 9 "9" -%token t10 10 "10" -%token t11 11 "11" -%token t12 12 "12" -%token t13 13 "13" -%token t14 14 "14" -%token t15 15 "15" -%token t16 16 "16" -%token t17 17 "17" -%token t18 18 "18" -%token t19 19 "19" -%token t20 20 "20" -%token t21 21 "21" -%token t22 22 "22" -%token t23 23 "23" -%token t24 24 "24" -%token t25 25 "25" -%token t26 26 "26" -%token t27 27 "27" -%token t28 28 "28" -%token t29 29 "29" -%token t30 30 "30" -%token t31 31 "31" -%token t32 32 "32" -%token t33 33 "33" -%token t34 34 "34" -%token t35 35 "35" -%token t36 36 "36" -%token t37 37 "37" -%token t38 38 "38" -%token t39 39 "39" -%token t40 40 "40" -%token t41 41 "41" -%token t42 42 "42" -%token t43 43 "43" -%token t44 44 "44" -%token t45 45 "45" -%token t46 46 "46" -%token t47 47 "47" -%token t48 48 "48" -%token t49 49 "49" -%token t50 50 "50" -%token t51 51 "51" -%token t52 52 "52" -%token t53 53 "53" -%token t54 54 "54" -%token t55 55 "55" -%token t56 56 "56" -%token t57 57 "57" -%token t58 58 "58" -%token t59 59 "59" -%token t60 60 "60" -%token t61 61 "61" -%token t62 62 "62" -%token t63 63 "63" -%token t64 64 "64" -%token t65 65 "65" -%token t66 66 "66" -%token t67 67 "67" -%token t68 68 "68" -%token t69 69 "69" -%token t70 70 "70" -%token t71 71 "71" -%token t72 72 "72" -%token t73 73 "73" -%token t74 74 "74" -%token t75 75 "75" -%token t76 76 "76" -%token t77 77 "77" -%token t78 78 "78" -%token t79 79 "79" -%token t80 80 "80" -%token t81 81 "81" -%token t82 82 "82" -%token t83 83 "83" -%token t84 84 "84" -%token t85 85 "85" -%token t86 86 "86" -%token t87 87 "87" -%token t88 88 "88" -%token t89 89 "89" -%token t90 90 "90" -%token t91 91 "91" -%token t92 92 "92" -%token t93 93 "93" -%token t94 94 "94" -%token t95 95 "95" -%token t96 96 "96" -%token t97 97 "97" -%token t98 98 "98" -%token t99 99 "99" -%token t100 100 "100" -%token t101 101 "101" -%token t102 102 "102" -%token t103 103 "103" -%token t104 104 "104" -%token t105 105 "105" -%token t106 106 "106" -%token t107 107 "107" -%token t108 108 "108" -%token t109 109 "109" -%token t110 110 "110" -%token t111 111 "111" -%token t112 112 "112" -%token t113 113 "113" -%token t114 114 "114" -%token t115 115 "115" -%token t116 116 "116" -%token t117 117 "117" -%token t118 118 "118" -%token t119 119 "119" -%token t120 120 "120" -%token t121 121 "121" -%token t122 122 "122" -%token t123 123 "123" -%token t124 124 "124" -%token t125 125 "125" -%token t126 126 "126" -%token t127 127 "127" -%token t128 128 "128" -%token t129 129 "129" -%token t130 130 "130" -%token t131 131 "131" -%token t132 132 "132" -%token t133 133 "133" -%token t134 134 "134" -%token t135 135 "135" -%token t136 136 "136" -%token t137 137 "137" -%token t138 138 "138" -%token t139 139 "139" -%token t140 140 "140" -%token t141 141 "141" -%token t142 142 "142" -%token t143 143 "143" -%token t144 144 "144" -%token t145 145 "145" -%token t146 146 "146" -%token t147 147 "147" -%token t148 148 "148" -%token t149 149 "149" -%token t150 150 "150" -%token t151 151 "151" -%token t152 152 "152" -%token t153 153 "153" -%token t154 154 "154" -%token t155 155 "155" -%token t156 156 "156" -%token t157 157 "157" 
-%token t158 158 "158" -%token t159 159 "159" -%token t160 160 "160" -%token t161 161 "161" -%token t162 162 "162" -%token t163 163 "163" -%token t164 164 "164" -%token t165 165 "165" -%token t166 166 "166" -%token t167 167 "167" -%token t168 168 "168" -%token t169 169 "169" -%token t170 170 "170" -%token t171 171 "171" -%token t172 172 "172" -%token t173 173 "173" -%token t174 174 "174" -%token t175 175 "175" -%token t176 176 "176" -%token t177 177 "177" -%token t178 178 "178" -%token t179 179 "179" -%token t180 180 "180" -%token t181 181 "181" -%token t182 182 "182" -%token t183 183 "183" -%token t184 184 "184" -%token t185 185 "185" -%token t186 186 "186" -%token t187 187 "187" -%token t188 188 "188" -%token t189 189 "189" -%token t190 190 "190" -%token t191 191 "191" -%token t192 192 "192" -%token t193 193 "193" -%token t194 194 "194" -%token t195 195 "195" -%token t196 196 "196" -%token t197 197 "197" -%token t198 198 "198" -%token t199 199 "199" -%token t200 200 "200" -%% input: - exp { assert ($1 == 0); $$ = $1; } -| input exp { assert ($2 == $1 + 1); $$ = $2; } -; - -exp: - END - { $$ = 0; } -| "1" END - { $$ = 1; } -| "1" "2" END - { $$ = 2; } -| "1" "2" "3" END - { $$ = 3; } -| "1" "2" "3" "4" END - { $$ = 4; } -| "1" "2" "3" "4" "5" END - { $$ = 5; } -| "1" "2" "3" "4" "5" "6" END - { $$ = 6; } -| "1" "2" "3" "4" "5" "6" "7" END - { $$ = 7; } -| "1" "2" "3" "4" "5" "6" "7" "8" END - { $$ = 8; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" END - { $$ = 9; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" END - { $$ = 10; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" END - { $$ = 11; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" END - { $$ = 12; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" END - { $$ = 13; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" END - { $$ = 14; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" END - { $$ = 15; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - END - { $$ = 16; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" END - { $$ = 17; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" END - { $$ = 18; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" END - { $$ = 19; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" END - { $$ = 20; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" END - { $$ = 21; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" END - { $$ = 22; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" END - { $$ = 23; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" END - { $$ = 24; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" END - { $$ = 25; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" END - { $$ = 26; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" END - { $$ = 27; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" END - { 
$$ = 28; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" END - { $$ = 29; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - END - { $$ = 30; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" END - { $$ = 31; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" END - { $$ = 32; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" END - { $$ = 33; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" END - { $$ = 34; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" END - { $$ = 35; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" END - { $$ = 36; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" END - { $$ = 37; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" END - { $$ = 38; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" END - { $$ = 39; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" END - { $$ = 40; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" END - { $$ = 41; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" END - { $$ = 42; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" END - { $$ = 43; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - END - { $$ = 44; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" END - { $$ = 45; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" 
"23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" END - { $$ = 46; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" END - { $$ = 47; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" END - { $$ = 48; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" END - { $$ = 49; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" END - { $$ = 50; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" END - { $$ = 51; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" END - { $$ = 52; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" END - { $$ = 53; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" END - { $$ = 54; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" END - { $$ = 55; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" END - { $$ = 56; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" END - { $$ = 57; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - END - { $$ = 58; } -| "1" "2" "3" "4" "5" "6" "7" 
"8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" END - { $$ = 59; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" END - { $$ = 60; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" END - { $$ = 61; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" END - { $$ = 62; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" END - { $$ = 63; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" END - { $$ = 64; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" END - { $$ = 65; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" END - { $$ = 66; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" END - { $$ = 67; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" END - { $$ = 68; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" 
"41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" END - { $$ = 69; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" END - { $$ = 70; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" END - { $$ = 71; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - END - { $$ = 72; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" END - { $$ = 73; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" END - { $$ = 74; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" END - { $$ = 75; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" END - { $$ = 76; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" END - { $$ = 77; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" 
"54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" END - { $$ = 78; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" END - { $$ = 79; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" END - { $$ = 80; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" END - { $$ = 81; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" END - { $$ = 82; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" END - { $$ = 83; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" END - { $$ = 84; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" END - { $$ = 85; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" 
"75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - END - { $$ = 86; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" END - { $$ = 87; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" END - { $$ = 88; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" END - { $$ = 89; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" END - { $$ = 90; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" END - { $$ = 91; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" END - { $$ = 92; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" END - { $$ = 93; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - 
"31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" END - { $$ = 94; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" END - { $$ = 95; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" END - { $$ = 96; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" END - { $$ = 97; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" END - { $$ = 98; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" END - { $$ = 99; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - END - { $$ = 100; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" 
"27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" END - { $$ = 101; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" END - { $$ = 102; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" END - { $$ = 103; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" END - { $$ = 104; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" END - { $$ = 105; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" END - { $$ = 106; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" 
"76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" END - { $$ = 107; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" END - { $$ = 108; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" END - { $$ = 109; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" END - { $$ = 110; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" END - { $$ = 111; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - END - { $$ = 112; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" 
"83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" END - { $$ = 113; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" END - { $$ = 114; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" END - { $$ = 115; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" END - { $$ = 116; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" END - { $$ = 117; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" END - { $$ = 118; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" 
"46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" END - { $$ = 119; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" END - { $$ = 120; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" END - { $$ = 121; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" END - { $$ = 122; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" END - { $$ = 123; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" 
"98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - END - { $$ = 124; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" END - { $$ = 125; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" END - { $$ = 126; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" END - { $$ = 127; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" END - { $$ = 128; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" 
"116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" END - { $$ = 129; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" END - { $$ = 130; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" END - { $$ = 131; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" END - { $$ = 132; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" END - { $$ = 133; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" 
"109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" END - { $$ = 134; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" END - { $$ = 135; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - END - { $$ = 136; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" END - { $$ = 137; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" END - { $$ = 138; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" 
"72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" END - { $$ = 139; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" END - { $$ = 140; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" END - { $$ = 141; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" END - { $$ = 142; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" END - { $$ = 143; } -| "1" 
"2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" END - { $$ = 144; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" END - { $$ = 145; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" END - { $$ = 146; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" END - { $$ = 147; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" 
"66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - END - { $$ = 148; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" END - { $$ = 149; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" END - { $$ = 150; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" END - { $$ = 151; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" 
"107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" END - { $$ = 152; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" END - { $$ = 153; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" END - { $$ = 154; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" END - { $$ = 155; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" 
"126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" END - { $$ = 156; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" END - { $$ = 157; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" END - { $$ = 158; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" END - { $$ = 159; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" 
"130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - END - { $$ = 160; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" END - { $$ = 161; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" END - { $$ = 162; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" END - { $$ = 163; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" 
"117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" END - { $$ = 164; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" END - { $$ = 165; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" END - { $$ = 166; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" END - { $$ = 167; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" 
- "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" END - { $$ = 168; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" END - { $$ = 169; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" END - { $$ = 170; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" END - { $$ = 171; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" 
"34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - END - { $$ = 172; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" END - { $$ = 173; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" END - { $$ = 174; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" 
"145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" END - { $$ = 175; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" END - { $$ = 176; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" END - { $$ = 177; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" END - { $$ = 178; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" 
"67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" END - { $$ = 179; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" END - { $$ = 180; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" END - { $$ = 181; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" 
"152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" END - { $$ = 182; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" END - { $$ = 183; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - END - { $$ = 184; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" END - { $$ = 185; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" 
"42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" END - { $$ = 186; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" END - { $$ = 187; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" END - { $$ = 188; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" 
"110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" END - { $$ = 189; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" END - { $$ = 190; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" END - { $$ = 191; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" 
"159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" END - { $$ = 192; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" END - { $$ = 193; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" END - { $$ = 194; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" END - { $$ = 195; } -| 
"1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - END - { $$ = 196; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" END - { $$ = 197; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" END - { $$ = 198; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" 
"36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" "199" END - { $$ = 199; } -| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" - "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" - "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" - "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" - "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" - "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" - "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" - "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" - "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" - "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" - "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" - "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" - "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" - "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" - "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" - "197" "198" "199" "200" END - { $$ = 200; } -; -%% - - - - -/* A C error reporting function. */ -/* !POSIX */ static -void yyerror (const char *msg) -{ - fprintf (stderr, "%s\n", msg); -} -static int -yylex (void) -{ - static int inner = 1; - static int outer = 0; - if (outer > MAX) - return 0; - else if (inner > outer) - { - inner = 1; - ++outer; - return END; - } - return inner++; -} -#include /* getenv. */ -#include /* strcmp. */ -int -main (int argc, char const* argv[]) -{ - (void) argc; - (void) argv; - return yyparse (); -} +stderr: + | (* *) + (*) + (*) +./calc.at:1489: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token invalid token () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +611. torture.at:275: testing State number type: 65536 states ... 
+./torture.at:275: ruby $abs_top_srcdir/tests/linear 65536 >input.y || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) Starting parse Entering state 0 Reading a token @@ -208797,17 +210685,72 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () Entering state 11 Next token is token '*' () @@ -208818,7 +210761,79 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: cat stderr +--- /dev/null 2023-04-20 00:26:40.000000000 +1400 ++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/stderr 2023-04-20 07:48:36.394917922 +1400 +@@ -0,0 +1 @@ ++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/611/test-source: line 14: ruby: command not found +stderr: +stderr: +611. torture.at:275: ./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -208833,80 +210848,164 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error on token [invalid token] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token invalid token () + skipped (torture.at:275) +612. torture.at:276: testing State number type: 65537 states ... +./torture.at:276: ruby $abs_top_srcdir/tests/linear 65537 >input.y || exit 77 +./calc.at:1491: cat stderr +613. torture.at:385: testing Many lookahead tokens ... +./torture.at:387: "$PERL" -w ./gengram.pl 1000 || exit 77 +--- /dev/null 2023-04-20 00:26:40.000000000 +1400 ++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/stderr 2023-04-20 07:48:36.442916071 +1400 +@@ -0,0 +1 @@ ++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/612/test-source: line 14: ruby: command not found +input: +612. 
torture.at:276: | error +./calc.at:1486: $PREPARSER ./calc input + skipped (torture.at:276) + +stderr: +input: +./calc.at:1487: cat stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | + | +1 +./calc.at:1491: $PREPARSER ./calc input +614. torture.at:485: testing Exploding the Stack Size with Alloca ... +./torture.at:494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +stderr: +./calc.at:1489: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +input: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token () +syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token () + +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: + | 1 = 2 = 3 +./calc.at:1487: $PREPARSER ./calc input %define parse.error verbose %debug %{ @@ -210918,7 +213017,8 @@ | n894 "894" { assert ($1 == 894); $$ = $1; } | n895 "895" { assert ($1 == 895); $$ = $1; } | n896 "896" { assert ($1 == 896); $$ = $1; } -| n897 "897" { assert ($1 == 897); $$ = $1; } +| n897 "897" { assert ($1 =input: += 897); $$ = $1; } | n898 "898" { assert ($1 == 898); $$ = $1; } | n899 "899" { assert ($1 == 899); $$ = $1; } | n900 "900" { assert ($1 == 900); $$ = $1; } @@ -211158,135 +213258,7 @@ n133: token { $$ = 133; }; n134: token { $$ = 134; }; n135: token { $$ = 135; }; -nstderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 119): - $1 = token '-' () - $2 = token error () -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) 
-Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./torture.at:494: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -136: token { $$ = 136; }; +n136: token { $$ = 136; }; n137: token { $$ = 137; }; n138: token { $$ = 138; }; n139: token { $$ = 139; }; @@ -212190,1716 +214162,3595 @@ (void) argv; return yyparse (); } -./torture.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -input: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1491: $PREPARSER ./calc input -./torture.at:393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -./calc.at:1486: cat stderr -./calc.at:1487: cat stderr -616. existing.at:74: testing GNU AWK 3.1.0 Grammar: LALR(1) ... 
-./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1489: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) -Entering state 20 -Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = 
nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token 
'\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 -Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = 
nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 +Next token is token '=' () +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +stderr: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ 
= nterm exp (7.1-9: -1) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:393: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +stderr: +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +./calc.at:1486: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) +Next token is token '=' () +syntax error on token ['='] (expected: ['-'] ['+'] ['*'] ['/'] ['^']) +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +input: +./calc.at:1491: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1489: $PREPARSER ./calc input +615. torture.at:531: testing Exploding the Stack Size with Malloc ... 
+stderr: +./calc.at:1491: $PREPARSER ./calc /dev/null +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 -Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) -Entering state 4 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 +Next token is token '!' () +Shifting token '!' 
() +Entering state 5 Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./torture.at:535: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./torture.at:494: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: + | 1 = 2 = 3 +./calc.at:1486: $PREPARSER ./calc input +Starting parse +Entering state 0 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - 
$3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1487: cat stderr +stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 27 Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +./calc.at:1486: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | + | +1 +./calc.at:1487: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) -Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Next token is token '=' () +syntax error, unexpected '=' +Error: popping nterm exp (2) +Error: popping token '=' () +Error: popping nterm exp (1) +Cleanup: discarding lookahead token '=' () +616. existing.at:74: testing GNU AWK 3.1.0 Grammar: LALR(1) ... +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (14.1: ) -Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '+' () +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +stderr: +./calc.at:1489: cat stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 87): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error on token ['+'] (expected: [end of file] [number] ['-'] ['\n'] ['('] ['!']) +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () ./torture.at:535: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: cat stderr input: -./torture.at:494: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -input: - | - | +1 + | (#) + (#) = 2222 +./calc.at:1489: $PREPARSER ./calc input stderr: -./calc.at:1486: $PREPARSER ./calc input - | (* *) + (*) + (*) +input: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Next token is token '=' (1.11: ) +Reading a token +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (1.14-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = 
token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1487: cat stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 + $1 = token "number" (1.15: 1) +-> 
$$ = nterm exp (1.15: 1) +Entering state 29 Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) Entering state 29 -Next token is token '=' (2.12: ) +Reading a token +Next token is token '+' (1.20: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ 
= nterm exp (4.1-4: -1) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 -Next token is token '\n' (4.10-5.0: ) +Reading a token +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1487: $PREPARSER ./calc /dev/null +stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token -Next token is token '\n' (5.11-6.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) 
--> $$ = nterm exp (5.1-10: 1) + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (2222) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +input: + | + | +1 +./calc.at:1486: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 +Now at end of input. +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token end of file () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 
= nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token ')' 
(10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' (10.16-11.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () Entering state 3 Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +stderr: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Now at end of input. +syntax error on token [end of file] (expected: [number] ['-'] ['\n'] ['('] ['!']) +Cleanup: discarding lookahead token end of file () +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +./calc.at:1489: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' () +Shifting token '\n' () +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Next token is token '+' () +syntax error, unexpected '+' +Error: popping nterm input () +Cleanup: discarding lookahead token '+' () +./calc.at:1491: cat stderr +stdout: +%code top { /* -*- c -*- */ +/* Adjust to the compiler. + We used to do it here, but each time we add a new line, + we have to adjust all the line numbers in error messages. + It's simpler to use a constant include to a varying file. 
*/ +#include +} + +%define parse.error verbose +%debug +%{ +#include +#include +#include +#define MAX 200 +static int yylex (void); +#include + +/* !POSIX */ static void yyerror (const char *msg); +%} +%union +{ + int val; +}; + +%token END "end" +%type exp input +%token t1 1 "1" +%token t2 2 "2" +%token t3 3 "3" +%token t4 4 "4" +%token t5 5 "5" +%token t6 6 "6" +%token t7 7 "7" +%token t8 8 "8" +%token t9 9 "9" +%token t10 10 "10" +%token t11 11 "11" +%token t12 12 "12" +%token t13 13 "13" +%token t14 14 "14" +%token t15 15 "15" +%token t16 16 "16" +%token t17 17 "17" +%token t18 18 "18" +%token t19 19 "19" +%token t20 20 "20" +%token t21 21 "21" +%token t22 22 "22" +%token t23 23 "23" +%token t24 24 "24" +%token t25 25 "25" +%token t26 26 "26" +%token t27 27 "27" +%token t28 28 "28" +%token t29 29 "29" +%token t30 30 "30" +%token t31 31 "31" +%token t32 32 "32" +%token t33 33 "33" +%token t34 34 "34" +%token t35 35 "35" +%token t36 36 "36" +%token t37 37 "37" +%token t38 38 "38" +%token t39 39 "39" +%token t40 40 "40" +%token t41 41 "41" +%token t42 42 "42" +%token t43 43 "43" +%token t44 44 "44" +%token t45 45 "45" +%token t46 46 "46" +%token t47 47 "47" +%token t48 48 "48" +%token t49 49 "49" +%token t50 50 "50" +%token t51 51 "51" +%token t52 52 "52" +%token t53 53 "53" +%token t54 54 "54" +%token t55 55 "55" +%token t56 56 "56" +%token t57 57 "57" +%token t58 58 "58" +%token t59 59 "59" +%token t60 60 "60" +%token t61 61 "61" +%token t62 62 "62" +%token t63 63 "63" +%token t64 64 "64" +%token t65 65 "65" +%token t66 66 "66" +%token t67 67 "67" +%token t68 68 "68" +%token t69 69 "69" +%token t70 70 "70" +%token t71 71 "71" +%token t72 72 "72" +%token t73 73 "73" +%token t74 74 "74" +%token t75 75 "75" +%token t76 76 "76" +%token t77 77 "77" +%token t78 78 "78" +%token t79 79 "79" +%token t80 80 "80" +%token t81 81 "81" +%token t82 82 "82" +%token t83 83 "83" +%token t84 84 "84" +%token t85 85 "85" +%token t86 86 "86" +%token t87 87 "87" +%token t88 88 "88" +%token t89 89 "89" +%token t90 90 "90" +%token t91 91 "91" +%token t92 92 "92" +%token t93 93 "93" +%token t94 94 "94" +%token t95 95 "95" +%token t96 96 "96" +%token t97 97 "97" +%token t98 98 "98" +%token t99 99 "99" +%token t100 100 "100" +%token t101 101 "101" +%token t102 102 "102" +%token t103 103 "103" +%token t104 104 "104" +%token t105 105 "105" +%token t106 106 "106" +%token t107 107 "107" +%token t108 108 "108" +%token t109 109 "109" +%token t110 110 "110" +%token t111 111 "111" +%token t112 112 "112" +%token t113 113 "113" +%token t114 114 "114" +%token t115 115 "115" +%token t116 116 "116" +%token t117 117 "117" +%token t118 118 "118" +%token t119 119 "119" +%token t120 120 "120" +%token t121 121 "121" +%token t122 122 "122" +%token t123 123 "123" +%token t124 124 "124" +%token t125 125 "125" +%token t126 126 "126" +%token t127 127 "127" +%token t128 128 "128" +%token t129 129 "129" +%token t130 130 "130" +%token t131 131 "131" +%token t132 132 "132" +%token t133 133 "133" +%token t134 134 "134" +%token t135 135 "135" +%token t136 136 "136" +%token t137 137 "137" +%token t138 138 "138" +%token t139 139 "139" +%token t140 140 "140" +%token t141 141 "141" +%token t142 142 "142" +%token t143 143 "143" +%token t144 144 "144" +%token t145 145 "145" +%token t146 146 "146" +%token t147 147 "147" +%token t148 148 "148" +%token t149 149 "149" +%token t150 150 "150" +%token t151 151 "151" +%token t152 152 "152" +%token t153 153 "153" +%token t154 154 "154" +%token t155 155 "155" +%token t156 156 "156" +%token t157 157 "157" 
+%token t158 158 "158" +%token t159 159 "159" +%token t160 160 "160" +%token t161 161 "161" +%token t162 162 "162" +%token t163 163 "163" +%token t164 164 "164" +%token t165 165 "165" +%token t166 166 "166" +%token t167 167 "167" +%token t168 168 "168" +%token t169 169 "169" +%token t170 170 "170" +%token t171 171 "171" +%token t172 172 "172" +%token t173 173 "173" +%token t174 174 "174" +%token t175 175 "175" +%token t176 176 "176" +%token t177 177 "177" +%token t178 178 "178" +%token t179 179 "179" +%token t180 180 "180" +%token t181 181 "181" +%token t182 182 "182" +%token t183 183 "183" +%token t184 184 "184" +%token t185 185 "185" +%token t186 186 "186" +%token t187 187 "187" +%token t188 188 "188" +%token t189 189 "189" +%token t190 190 "190" +%token t191 191 "191" +%token t192 192 "192" +%token t193 193 "193" +%token t194 194 "194" +%token t195 195 "195" +%token t196 196 "196" +%token t197 197 "197" +%token t198 198 "198" +%token t199 199 "199" +%token t200 200 "200" +%% +input: + exp { assert ($1 == 0); $$ = $1; } +| input exp { assert ($2 == $1 + 1); $$ = $2; } +; + +exp: + END + { $$ = 0; } +| "1" END + { $$ = 1; } +| "1" "2" END + { $$ = 2; } +| "1" "2" "3" END + { $$ = 3; } +| "1" "2" "3" "4" END + { $$ = 4; } +| "1" "2" "3" "4" "5" END + { $$ = 5; } +| "1" "2" "3" "4" "5" "6" END + { $$ = 6; } +| "1" "2" "3" "4" "5" "6" "7" END + { $$ = 7; } +| "1" "2" "3" "4" "5" "6" "7" "8" END + { $$ = 8; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" END + { $$ = 9; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" END + { $$ = 10; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" END + { $$ = 11; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" END + { $$ = 12; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" END + { $$ = 13; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" END + { $$ = 14; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" END + { $$ = 15; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + END + { $$ = 16; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" END + { $$ = 17; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" END + { $$ = 18; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" END + { $$ = 19; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" END + { $$ = 20; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" END + { $$ = 21; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" END + { $$ = 22; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" END + { $$ = 23; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" END + { $$ = 24; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" END + { $$ = 25; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" END + { $$ = 26; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" END + { $$ = 27; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" END + { 
$$ = 28; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" END + { $$ = 29; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + END + { $$ = 30; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" END + { $$ = 31; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" END + { $$ = 32; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" END + { $$ = 33; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" END + { $$ = 34; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" END + { $$ = 35; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" END + { $$ = 36; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" END + { $$ = 37; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" END + { $$ = 38; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" END + { $$ = 39; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" END + { $$ = 40; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" END + { $$ = 41; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" END + { $$ = 42; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" END + { $$ = 43; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + END + { $$ = 44; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" END + { $$ = 45; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" 
"23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" END + { $$ = 46; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" END + { $$ = 47; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" END + { $$ = 48; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" END + { $$ = 49; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" END + { $$ = 50; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" END + { $$ = 51; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" END + { $$ = 52; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" END + { $$ = 53; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" END + { $$ = 54; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" END + { $$ = 55; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" END + { $$ = 56; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" END + { $$ = 57; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + END + { $$ = 58; } +| "1" "2" "3" "4" "5" "6" "7" 
"8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" END + { $$ = 59; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" END + { $$ = 60; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" END + { $$ = 61; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" END + { $$ = 62; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" END + { $$ = 63; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" END + { $$ = 64; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" END + { $$ = 65; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" END + { $$ = 66; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" END + { $$ = 67; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" END + { $$ = 68; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" 
"41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" END + { $$ = 69; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" END + { $$ = 70; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" END + { $$ = 71; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + END + { $$ = 72; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" END + { $$ = 73; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" END + { $$ = 74; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" END + { $$ = 75; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" END + { $$ = 76; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" END + { $$ = 77; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" 
"54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" END + { $$ = 78; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" END + { $$ = 79; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" END + { $$ = 80; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" END + { $$ = 81; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" END + { $$ = 82; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" END + { $$ = 83; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" END + { $$ = 84; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" END + { $$ = 85; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" 
"75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + END + { $$ = 86; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" END + { $$ = 87; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" END + { $$ = 88; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" END + { $$ = 89; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" END + { $$ = 90; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" END + { $$ = 91; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" END + { $$ = 92; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" END + { $$ = 93; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + 
"31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" END + { $$ = 94; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" END + { $$ = 95; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" END + { $$ = 96; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" END + { $$ = 97; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" END + { $$ = 98; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" END + { $$ = 99; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + END + { $$ = 100; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" 
"27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" END + { $$ = 101; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" END + { $$ = 102; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" END + { $$ = 103; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" END + { $$ = 104; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" END + { $$ = 105; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" END + { $$ = 106; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" 
"76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" END + { $$ = 107; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" END + { $$ = 108; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" END + { $$ = 109; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" END + { $$ = 110; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" END + { $$ = 111; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + END + { $$ = 112; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" 
"83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" END + { $$ = 113; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" END + { $$ = 114; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" END + { $$ = 115; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" END + { $$ = 116; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" END + { $$ = 117; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" END + { $$ = 118; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" 
"46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" END + { $$ = 119; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" END + { $$ = 120; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" END + { $$ = 121; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" END + { $$ = 122; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" END + { $$ = 123; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" 
"98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + END + { $$ = 124; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" END + { $$ = 125; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" END + { $$ = 126; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" END + { $$ = 127; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" END + { $$ = 128; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" 
"116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" END + { $$ = 129; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" END + { $$ = 130; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" END + { $$ = 131; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" END + { $$ = 132; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" END + { $$ = 133; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" 
"109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" END + { $$ = 134; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" END + { $$ = 135; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + END + { $$ = 136; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" END + { $$ = 137; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" END + { $$ = 138; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" 
"72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" END + { $$ = 139; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" END + { $$ = 140; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" END + { $$ = 141; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" END + { $$ = 142; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" END + { $$ = 143; } +| "1" 
"2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" END + { $$ = 144; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" END + { $$ = 145; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" END + { $$ = 146; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" END + { $$ = 147; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" 
"66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + END + { $$ = 148; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" END + { $$ = 149; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" END + { $$ = 150; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" END + { $$ = 151; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" 
"107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" END + { $$ = 152; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" END + { $$ = 153; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" END + { $$ = 154; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" END + { $$ = 155; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" 
"126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" END + { $$ = 156; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" END + { $$ = 157; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" END + { $$ = 158; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" END + { $$ = 159; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" 
"130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + END + { $$ = 160; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" END + { $$ = 161; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" END + { $$ = 162; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" END + { $$ = 163; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" 
"117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" END + { $$ = 164; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" END + { $$ = 165; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" END + { $$ = 166; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" END + { $$ = 167; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" 
+ "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" END + { $$ = 168; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" END + { $$ = 169; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" END + { $$ = 170; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" END + { $$ = 171; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" 
"34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + END + { $$ = 172; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" END + { $$ = 173; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" END + { $$ = 174; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" 
"145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" END + { $$ = 175; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" END + { $$ = 176; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" END + { $$ = 177; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" END + { $$ = 178; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" 
"67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" END + { $$ = 179; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" END + { $$ = 180; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" END + { $$ = 181; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" 
"152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" END + { $$ = 182; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" END + { $$ = 183; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + END + { $$ = 184; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" END + { $$ = 185; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" 
"42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" END + { $$ = 186; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" END + { $$ = 187; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" END + { $$ = 188; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" 
"110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" END + { $$ = 189; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" END + { $$ = 190; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" END + { $$ = 191; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" 
"159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" END + { $$ = 192; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" END + { $$ = 193; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" END + { $$ = 194; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" END + { $$ = 195; } +| 
"1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + END + { $$ = 196; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" END + { $$ = 197; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" END + { $$ = 198; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" 
"36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" "199" END + { $$ = 199; } +| "1" "2" "3" "4" "5" "6" "7" "8" "9" "10" "11" "12" "13" "14" "15" "16" + "17" "18" "19" "20" "21" "22" "23" "24" "25" "26" "27" "28" "29" "30" + "31" "32" "33" "34" "35" "36" "37" "38" "39" "40" "41" "42" "43" "44" + "45" "46" "47" "48" "49" "50" "51" "52" "53" "54" "55" "56" "57" "58" + "59" "60" "61" "62" "63" "64" "65" "66" "67" "68" "69" "70" "71" "72" + "73" "74" "75" "76" "77" "78" "79" "80" "81" "82" "83" "84" "85" "86" + "87" "88" "89" "90" "91" "92" "93" "94" "95" "96" "97" "98" "99" "100" + "101" "102" "103" "104" "105" "106" "107" "108" "109" "110" "111" "112" + "113" "114" "115" "116" "117" "118" "119" "120" "121" "122" "123" "124" + "125" "126" "127" "128" "129" "130" "131" "132" "133" "134" "135" "136" + "137" "138" "139" "140" "141" "142" "143" "144" "145" "146" "147" "148" + "149" "150" "151" "152" "153" "154" "155" "156" "157" "158" "159" "160" + "161" "162" "163" "164" "165" "166" "167" "168" "169" "170" "171" "172" + "173" "174" "175" "176" "177" "178" "179" "180" "181" "182" "183" "184" + "185" "186" "187" "188" "189" "190" "191" "192" "193" "194" "195" "196" + "197" "198" "199" "200" END + { $$ = 200; } +; +%% + + + + +/* A C error reporting function. */ +/* !POSIX */ static +void yyerror (const char *msg) +{ + fprintf (stderr, "%s\n", msg); +} +static int +yylex (void) +{ + static int inner = 1; + static int outer = 0; + if (outer > MAX) + return 0; + else if (inner > outer) + { + inner = 1; + ++outer; + return END; + } + return inner++; +} +#include /* getenv. */ +#include /* strcmp. */ +int +main (int argc, char const* argv[]) +{ + (void) argc; + (void) argv; + return yyparse (); +} +input: +./calc.at:1487: cat stderr + | (1 + #) = 1111 +./calc.at:1489: $PREPARSER ./calc input +input: +./torture.at:139: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +stderr: + | (!!) 
+ (1 2) = 1 +./calc.at:1491: $PREPARSER ./calc input +input: +Starting parse +Entering state 0 Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (12.12-13.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) -Entering state 4 +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1487: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering 
state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1487: $PREPARSER ./calc input +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1486: cat stderr stderr: -617. existing.at:74: testing GNU AWK 3.1.0 Grammar: IELR(1) ... 
stderr: -input: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y Starting parse Entering state 0 Reading a token @@ -213907,154 +217758,76 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) -> $$ = nterm exp (1111) -Entering state 29 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (2222) - $2 = token '+' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) $2 = token '\n' () 
-> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () - | 1 2 -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) Starting parse Entering state 0 Reading a token @@ -214062,17 +217835,9 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -214092,14 +217857,61 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering 
state 12 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () Entering state 11 Next token is token ')' () Shifting token ')' () @@ -214133,6 +217945,12 @@ Next token is token '*' () Error: discarding token '*' () Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -214145,109 +217963,20 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '\n' () +Next token is token '+' () Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (3333) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -./calc.at:1487: cat stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stdout: -input: -./calc.at:1489: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc - - | 1 + 2 * 3 + !+ ++ -./calc.at:1487: $PREPARSER ./calc input -./calc.at:1491: cat stderr -stderr: -Starting parse -Entering state 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (1) Shifting token number (1) @@ -214255,11 +217984,11 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token Next token is token number (2) Shifting token number (2) @@ -214267,56 +217996,50 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 29 +Entering state 30 Reading a token Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (1) + $2 = token '*' () + $3 = nterm exp (2) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token '*' () Shifting token '*' () Entering state 21 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 -Next token is token '+' () +Reading a token +Next token is token '=' () Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) + $1 = nterm exp (3333) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1486: cat stderr -input: -Starting parse -Entering state 0 +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token Next token is token number (1) Shifting token number (1) @@ -214324,84 +218047,38 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 +Entering state 27 Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) + $2 = token '=' () + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 121): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1486: $PREPARSER ./calc /dev/null - | 1//2 -./calc.at:1491: $PREPARSER ./calc input -input: -input: stderr: - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 - | 1 + 2 * 3 + !- ++ -./calc.at:1487: $PREPARSER ./calc input -./calc.at:1489: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -214409,30 +218086,36 @@ syntax error, unexpected end of input Cleanup: discarding lookahead token end of input () stderr: -stderr: ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -Starting parse -Entering state 0 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (1) Shifting token number (1) @@ -214440,997 +218123,346 @@ Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 Next token is token '+' () Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2) --> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 105): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 Next token is token '+' () Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1) + $1 = nterm exp (2) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' 
() -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 122): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token + $3 = nterm exp (1) +-> $$ = nterm exp (3) +Entering state 12 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) +Next token is token ')' () +syntax error on token [')'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '+' () +Error: popping nterm exp (3) +Shifting token error () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number 
(2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (5) -Shifting token number (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 
0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () 
-Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '(' () +Shifting token '(' () +Entering state 4 Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token Next token is token number (2) Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2) -> $$ = nterm exp (2) -Entering state 28 +Entering state 30 Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): +Next token is token '*' () +Reducing stack 0 by rule 9 (line 105): $1 = nterm exp (1) - $2 = token '-' () + $2 = token '*' () $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering 
state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> $$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) - $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) -> $$ = nterm exp (2) Entering state 12 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Error: popping token '*' () +Error: popping nterm exp (2) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 +-> $$ = nterm exp (1111) +Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (3333) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (4444) Entering state 8 Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next 
token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (4444) $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) + $3 = nterm exp (1) +error: 4444 != 1 +-> $$ = nterm exp (4444) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (4444) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Now at end of input. +Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () +Starting parse +Entering state 0 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' 
(1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (256) -Shifting token number (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 Reading a token -Next 
token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) +Now at end of input. 
+syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input () ./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -215441,883 +218473,219 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +./calc.at:1489: cat stderr +./calc.at:1491: cat stderr ./calc.at:1487: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +input: + | (# + 1) = 1111 +./calc.at:1486: cat stderr +./calc.at:1489: $PREPARSER ./calc input +input: + | (- *) + (1 2) = 1 +./calc.at:1491: $PREPARSER ./calc input +stderr: + | (!!) + (1 2) = 1 +./calc.at:1487: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (7) -Shifting token number (7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (7) --> $$ = nterm exp (7) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7) - $2 = token '=' () - $3 = nterm exp (7) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token 
number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 10 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (3) --> $$ = nterm exp (-3) -Entering state 30 -Next token is token '=' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (-3) --> $$ = nterm exp (-6) -Entering state 29 -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (-6) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (5) -Shifting token number (5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (5) --> $$ = nterm exp (5) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (5) --> $$ = nterm exp (-5) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-5) - $2 = token '=' () - $3 = nterm exp (-5) --> $$ = nterm exp (-5) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-5) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token 
'=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () Reading a token Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 +Error: discarding token number (1) Reading a token Next token is token ')' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 12 +Entering state 11 Next token is token ')' () Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () - $2 = nterm exp (-1) + $2 = token error () $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (-1) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (1) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1) - $2 = token '=' () - $3 = nterm exp (1) --> $$ = nterm exp (1) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) 
-Entering state 10 Reading a token Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (-1) --> $$ = nterm exp (1) -Entering state 10 -Next token is token '=' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 10 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (1) --> $$ = nterm exp (-1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-1) - $2 = token '=' () - $3 = nterm exp (-1) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-1) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 28 -Reading a token -Next token is token '-' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (2) --> $$ = nterm exp (-1) -Entering state 8 -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (-1) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-4) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token number (4) -Shifting token number (4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (4) --> $$ = nterm exp (4) -Entering state 10 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' () - $2 = nterm exp (4) --> 
$$ = nterm exp (-4) -Entering state 27 -Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (-4) + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (-4) --> $$ = nterm exp (-4) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (-4) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 28 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (2) - $2 = token '-' () - $3 = nterm exp (3) --> $$ = nterm exp (-1) -Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (-1) - $3 = token ')' () --> $$ = nterm exp (-1) -Entering state 28 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (-1) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2) - $2 = token '=' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () 
-Entering state 6 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 8 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (8) -Entering state 32 -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (2) - $2 = token '^' () - $3 = nterm exp (8) --> $$ = nterm exp (256) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (256) -Shifting token number (256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (256) --> $$ = nterm exp (256) -Entering state 27 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (256) - $2 = token '=' () - $3 = nterm exp (256) --> $$ = nterm exp (256) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (256) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () -Entering state 6 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 12 -Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 -Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 32 -Reading a token -Next token is token ')' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm 
exp (2) - $2 = token '^' () - $3 = nterm exp (2) --> $$ = nterm exp (4) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (4) - $3 = token ')' () --> $$ = nterm exp (4) -Entering state 8 Reading a token -Next token is token '^' () -Shifting token '^' () -Entering state 23 +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 32 +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4) - $2 = token '^' () - $3 = nterm exp (3) --> $$ = nterm exp (64) +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token number (64) -Shifting token number (64) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (64) --> $$ = nterm exp (64) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (64) - $2 = token '=' () - $3 = nterm exp (64) --> $$ = nterm exp (64) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (64) - $2 = token '\n' () --> $$ = nterm line () -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input () - $2 = nterm line () --> $$ = nterm input () + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: - | 1 2 -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1491: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1486: cat stderr -./calc.at:1487: $PREPARSER ./calc input +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: -stderr: - | error -./calc.at:1491: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -216325,16 +218693,20 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 118): @@ -216352,12 +218724,21 @@ Shifting token '(' () Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -216382,19 +218763,20 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (2222) + $3 = nterm exp (1) +error: 2222 != 1 -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -216415,33 +218797,11 @@ Entering state 16 Cleanup: popping token end of file () Cleanup: popping nterm input () + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1486: $PREPARSER ./calc input ./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) 
-Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Cleanup: discarding lookahead token number (2) ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -input: Starting parse Entering state 0 Reading a token @@ -216456,99 +218816,62 @@ Next token is token error () Error: discarding token error () Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Error: discarding token '+' () Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token number (1) +Error: discarding token number (1) Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 103): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 +Reading a token Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (2222) --> $$ = nterm exp (2222) +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (2222) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (2222) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () stderr: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1486: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -stderr: Starting parse Entering state 0 Reading a token @@ -216797,20 +219120,235 @@ Cleanup: popping token end of input () Cleanup: popping nterm input () ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: -stdout: -./calc.at:1491: cat stderr -./torture.at:497: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) 
+Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 120): + $1 = token '!' () + $2 = token '!' () +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of file () +Entering state 16 +Cleanup: popping token end of file () +Cleanup: popping nterm input () stderr: Starting parse Entering state 0 @@ -217059,9 +219597,7 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1487: cat stderr -stderr: -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -217071,55 +219607,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | 1 = 2 = 3 -./torture.at:497: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1491: $PREPARSER ./calc input -stderr: -./calc.at:1489: cat stderr -stderr: -stdout: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./torture.at:500: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -input: -./torture.at:538: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 -stderr: - | (1 + #) = 1111 -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1487: $PREPARSER ./calc input -./torture.at:538: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:500: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; +./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -217129,149 +219617,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -stderr: +./calc.at:1491: cat stderr +./calc.at:1487: cat stderr +./calc.at:1489: cat stderr input: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) - | 1//2 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token 
number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -stderr: -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1486: cat stderr -./torture.at:541: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./torture.at:541: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -217281,435 +219631,118 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 + | (* *) + (*) + (*) +./calc.at:1491: $PREPARSER ./calc input +input: + | (- *) + (1 2) = 1 +input: +./calc.at:1487: $PREPARSER ./calc input stderr: -./torture.at:504: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 +./calc.at:1486: cat stderr +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -input: -stderr: -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -stderr: -memory exhausted -memory exhausted -./torture.at:545: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 -./torture.at:504: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) 
+ (1 2) = 1 -stderr: -./calc.at:1491: cat stderr -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 -Reading a token -Next token is token '/' () -syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '/' () -stderr: -memory exhausted -memory exhausted -stderr: -./torture.at:545: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -memory exhausted -memory exhausted -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () +Next token is token ')' (1.5: ) Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token ')' () +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -input: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - | - | +1 -./calc.at:1491: $PREPARSER ./calc input -stderr: -./calc.at:1487: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token ')' () +Next token is token ')' (1.17: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -memory exhausted -memory exhausted -./torture.at:548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1487: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -217717,206 +219750,15 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of file () -Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1489: cat stderr -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:510: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 118): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 92): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 88): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 82): - $1 = nterm line () --> 
$$ = nterm input () -Entering state 6 -Reading a token Now at end of input. -Shifting token end of file () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of file () -Cleanup: popping nterm input () -./calc.at:1486: cat stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | error -./calc.at:1489: $PREPARSER ./calc input -stderr: -./calc.at:1491: cat stderr -stderr: -stdout: -input: - | (- *) + (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc /dev/null -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1492: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1487: cat stderr -./torture.at:548: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + | (1 + # + 1) = 1111 +./calc.at:1489: $PREPARSER ./calc input stderr: -input: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -217930,10 +219772,10 @@ Entering state 2 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) Shifting token error () Entering state 9 -Reducing stack 0 by rule 15 (line 106): +Reducing stack 0 by rule 15 (line 119): $1 = token '-' () $2 = token error () Shifting token error () @@ -217946,7 +219788,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -217964,13 +219806,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 Reading a token Next token is token number (2) -syntax error, unexpected number +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) Error: popping nterm exp (1) Shifting token error () Entering state 11 @@ -217982,7 +219824,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -217990,7 +219832,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -218003,13 +219845,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (1) @@ -218019,57 +219861,24 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -input: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) - | (1 + # + 1) = 1111 -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1487: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token () -syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token () -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +input: Starting parse Entering state 0 Reading a token @@ -218080,7 +219889,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -218109,7 +219918,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -218123,13 +219932,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -218138,858 +219947,259 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (!!) 
+ (1 2) = 1 +stderr: +./calc.at:1486: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) Entering state 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" (2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = 
token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) Entering state 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) 
-Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by 
rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 
79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +stdout: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Reading a token -Next token is token "number" (12.3: 
2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 +Next token is token '-' () +Shifting token '-' () +Entering state 2 Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 119): + $1 = token '-' () + $2 = token error () +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 -Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) -Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> 
$$ = nterm exp (13.2-4: 4) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +Next token is token number (2) +syntax error on token [number] (expected: ['='] ['-'] ['+'] ['*'] ['/'] ['^'] [')']) +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) +Next token is token '=' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 27 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token "end of input" (14.1: ) +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) +Cleanup: popping token end of file () +Cleanup: popping nterm input () Starting parse Entering state 0 Reading a token @@ -218997,25 +220207,20 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () Reading a token Next token is token ')' () -Entering state 11 -Next token is token ')' () Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): @@ -219107,46 +220312,18 @@ Cleanup: popping token end of input () Cleanup: popping nterm input () stderr: -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y:47.1-6: error: useless associativity 
for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./torture.at:497: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 Starting parse Entering state 0 Reading a token @@ -219157,7 +220334,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -219186,7 +220363,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 118): +Reducing stack 0 by rule 14 (line 105): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -219200,13 +220377,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 93): +Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -219215,22 +220392,141 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' () + $2 = token '!' 
() +Shifting token error () +Entering state 11 +Reading a token +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '=' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./torture.at:497: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: cat stderr +stderr: +./torture.at:500: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +./calc.at:1487: cat stderr +stderr: +input: ./calc.at:1489: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -219241,7 +220537,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; +./torture.at:500: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: + | 1 + 2 * 3 + !+ ++ +./calc.at:1491: $PREPARSER ./calc input +stdout: +stderr: +./torture.at:538: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +./calc.at:1489: cat stderr +input: +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -219251,6 +220556,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +stderr: + | (* *) + (*) + (*) +stderr: +./calc.at:1487: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -219286,855 +220595,165 @@ -> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 9 (line 92): $1 = nterm exp (1.5: 2) $2 = token '*' (1.7: ) $3 = nterm exp (1.9: 3) -> $$ = nterm exp (1.5-9: 6) Entering state 29 -Next token is token '=' (1.11: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1.1: 1) $2 = token '+' (1.3: ) $3 = nterm exp (1.5-9: 6) -> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13: 7) -Shifting token "number" (1.13: 7) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13: 7) --> $$ = nterm exp (1.13: 7) -Entering state 27 -Reading a token -Next token is token '\n' (1.14-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 7) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13: 7) --> $$ = nterm exp (1.1-13: 7) -Entering state 8 -Next token is token '\n' (1.14-2.0: ) -Shifting token '\n' (1.14-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-13: 7) - $2 = token '\n' (1.14-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token "number" (2.1: 1) -Shifting token "number" (2.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.1: 1) --> $$ = nterm exp (2.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (2.3: ) -Shifting token '+' (2.3: ) +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token -Next token is token "number" (2.5: 2) -Shifting token "number" (2.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.5: 2) --> $$ = nterm exp (2.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (2.7: ) -Shifting token '*' (2.7: ) -Entering state 21 -Reading a token -Next token is token '-' (2.9: ) -Shifting token '-' (2.9: ) -Entering state 2 -Reading a token -Next token is token "number" (2.10: 3) -Shifting token "number" 
(2.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.10: 3) --> $$ = nterm exp (2.10: 3) -Entering state 10 -Reading a token -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.9: ) - $2 = nterm exp (2.10: 3) --> $$ = nterm exp (2.9-10: -3) -Entering state 30 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2.5: 2) - $2 = token '*' (2.7: ) - $3 = nterm exp (2.9-10: -3) --> $$ = nterm exp (2.5-10: -6) -Entering state 29 -Next token is token '=' (2.12: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2.1: 1) - $2 = token '+' (2.3: ) - $3 = nterm exp (2.5-10: -6) --> $$ = nterm exp (2.1-10: -5) -Entering state 8 -Next token is token '=' (2.12: ) -Shifting token '=' (2.12: ) -Entering state 18 -Reading a token -Next token is token '-' (2.14: ) -Shifting token '-' (2.14: ) -Entering state 2 -Reading a token -Next token is token "number" (2.15: 5) -Shifting token "number" (2.15: 5) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (2.15: 5) --> $$ = nterm exp (2.15: 5) -Entering state 10 -Reading a token -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (2.14: ) - $2 = nterm exp (2.15: 5) --> $$ = nterm exp (2.14-15: -5) -Entering state 27 -Next token is token '\n' (2.16-3.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2.1-10: -5) - $2 = token '=' (2.12: ) - $3 = nterm exp (2.14-15: -5) --> $$ = nterm exp (2.1-15: -5) -Entering state 8 -Next token is token '\n' (2.16-3.0: ) -Shifting token '\n' (2.16-3.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2.1-15: -5) - $2 = token '\n' (2.16-3.0: ) --> $$ = nterm line (2.1-3.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-2.0: ) - $2 = nterm line (2.1-3.0: ) --> $$ = nterm input (1.1-3.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (3.1-4.0: ) -Shifting token '\n' (3.1-4.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (3.1-4.0: ) --> $$ = nterm line (3.1-4.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-3.0: ) - $2 = nterm line (3.1-4.0: ) --> $$ = nterm input (1.1-4.0: ) -Entering state 6 -Reading a token -Next token is token '-' (4.1: ) -Shifting token '-' (4.1: ) -Entering state 2 -Reading a token -Next token is token "number" (4.2: 1) -Shifting token "number" (4.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.2: 1) --> $$ = nterm exp (4.2: 1) -Entering state 10 -Reading a token -Next token is token '^' (4.3: ) -Shifting token '^' (4.3: ) -Entering state 23 -Reading a token -Next token is token "number" (4.4: 2) -Shifting token "number" (4.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.4: 2) --> $$ = nterm exp (4.4: 2) -Entering state 32 -Reading a token -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (4.2: 1) - $2 = token '^' (4.3: ) - $3 = nterm exp (4.4: 2) --> $$ = nterm exp (4.2-4: 1) -Entering state 10 -Next token is token '=' (4.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.1: ) - $2 = nterm exp (4.2-4: 1) --> $$ = nterm exp (4.1-4: -1) -Entering state 8 -Next token is token '=' (4.6: ) -Shifting token '=' (4.6: ) -Entering state 18 -Reading a token -Next token is token '-' (4.8: ) -Shifting token '-' (4.8: ) -Entering 
state 2 -Reading a token -Next token is token "number" (4.9: 1) -Shifting token "number" (4.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (4.9: 1) --> $$ = nterm exp (4.9: 1) -Entering state 10 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (4.8: ) - $2 = nterm exp (4.9: 1) --> $$ = nterm exp (4.8-9: -1) -Entering state 27 -Next token is token '\n' (4.10-5.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4.1-4: -1) - $2 = token '=' (4.6: ) - $3 = nterm exp (4.8-9: -1) --> $$ = nterm exp (4.1-9: -1) -Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) -Entering state 6 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./torture.at:504: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 +./torture.at:538: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1486: cat stderr +stderr: +input: +Starting parse +Entering state 0 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) - $2 = nterm exp (5.2-3: -1) - $3 = token ')' (5.4: ) --> $$ = nterm exp (5.1-4: -1) -Entering state 8 -Reading a token -Next token is token '^' (5.5: ) -Shifting token '^' (5.5: ) -Entering state 23 -Reading a token -Next token is token "number" (5.6: 2) -Shifting token "number" (5.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.6: 2) --> $$ = nterm exp (5.6: 2) -Entering state 32 -Reading a token -Next token is token '=' (5.8: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (5.1-4: -1) - $2 = token '^' (5.5: ) - $3 = nterm exp (5.6: 2) --> $$ = nterm exp (5.1-6: 1) -Entering state 8 -Next token is token '=' (5.8: ) -Shifting token '=' (5.8: ) -Entering state 18 -Reading a token -Next token is token "number" (5.10: 1) -Shifting token "number" (5.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.10: 1) --> $$ = nterm exp (5.10: 1) -Entering state 27 -Reading a token -Next token is token '\n' (5.11-6.0: ) -Reducing stack 0 by rule 6 (line 
80): - $1 = nterm exp (5.1-6: 1) - $2 = token '=' (5.8: ) - $3 = nterm exp (5.10: 1) --> $$ = nterm exp (5.1-10: 1) -Entering state 8 -Next token is token '\n' (5.11-6.0: ) -Shifting token '\n' (5.11-6.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (5.1-10: 1) - $2 = token '\n' (5.11-6.0: ) --> $$ = nterm line (5.1-6.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-5.0: ) - $2 = nterm line (5.1-6.0: ) --> $$ = nterm input (1.1-6.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (6.1-7.0: ) -Shifting token '\n' (6.1-7.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (6.1-7.0: ) --> $$ = nterm line (6.1-7.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-6.0: ) - $2 = nterm line (6.1-7.0: ) --> $$ = nterm input (1.1-7.0: ) -Entering state 6 -Reading a token -Next token is token '-' (7.1: ) -Shifting token '-' (7.1: ) -Entering state 2 -Reading a token -Next token is token '-' (7.2: ) -Shifting token '-' (7.2: ) -Entering state 2 -Reading a token -Next token is token '-' (7.3: ) -Shifting token '-' (7.3: ) -Entering state 2 -Reading a token -Next token is token "number" (7.4: 1) -Shifting token "number" (7.4: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.4: 1) --> $$ = nterm exp (7.4: 1) -Entering state 10 -Reading a token -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.3: ) - $2 = nterm exp (7.4: 1) --> $$ = nterm exp (7.3-4: -1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.2: ) - $2 = nterm exp (7.3-4: -1) --> $$ = nterm exp (7.2-4: 1) -Entering state 10 -Next token is token '=' (7.6: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.1: ) - $2 = nterm exp (7.2-4: 1) --> $$ = nterm exp (7.1-4: -1) -Entering state 8 -Next token is token '=' (7.6: ) -Shifting token '=' (7.6: ) -Entering state 18 -Reading a token -Next token is token '-' (7.8: ) -Shifting token '-' (7.8: ) -Entering state 2 -Reading a token -Next token is token "number" (7.9: 1) -Shifting token "number" (7.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (7.9: 1) --> $$ = nterm exp (7.9: 1) -Entering state 10 -Reading a token -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (7.8: ) - $2 = nterm exp (7.9: 1) --> $$ = nterm exp (7.8-9: -1) -Entering state 27 -Next token is token '\n' (7.10-8.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (7.1-4: -1) - $2 = token '=' (7.6: ) - $3 = nterm exp (7.8-9: -1) --> $$ = nterm exp (7.1-9: -1) -Entering state 8 -Next token is token '\n' (7.10-8.0: ) -Shifting token '\n' (7.10-8.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (7.1-9: -1) - $2 = token '\n' (7.10-8.0: ) --> $$ = nterm line (7.1-8.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-7.0: ) - $2 = nterm line (7.1-8.0: ) --> $$ = nterm input (1.1-8.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (8.1-9.0: ) -Shifting token '\n' (8.1-9.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (8.1-9.0: ) --> $$ = nterm line (8.1-9.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-8.0: ) - $2 = nterm line (8.1-9.0: ) --> $$ = nterm input (1.1-9.0: ) -Entering state 6 -Reading a 
token -Next token is token "number" (9.1: 1) -Shifting token "number" (9.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.1: 1) --> $$ = nterm exp (9.1: 1) -Entering state 8 -Reading a token -Next token is token '-' (9.3: ) -Shifting token '-' (9.3: ) -Entering state 19 -Reading a token -Next token is token "number" (9.5: 2) -Shifting token "number" (9.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.5: 2) --> $$ = nterm exp (9.5: 2) -Entering state 28 -Reading a token -Next token is token '-' (9.7: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1: 1) - $2 = token '-' (9.3: ) - $3 = nterm exp (9.5: 2) --> $$ = nterm exp (9.1-5: -1) -Entering state 8 -Next token is token '-' (9.7: ) -Shifting token '-' (9.7: ) -Entering state 19 -Reading a token -Next token is token "number" (9.9: 3) -Shifting token "number" (9.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.9: 3) --> $$ = nterm exp (9.9: 3) -Entering state 28 -Reading a token -Next token is token '=' (9.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (9.1-5: -1) - $2 = token '-' (9.7: ) - $3 = nterm exp (9.9: 3) --> $$ = nterm exp (9.1-9: -4) -Entering state 8 -Next token is token '=' (9.11: ) -Shifting token '=' (9.11: ) -Entering state 18 -Reading a token -Next token is token '-' (9.13: ) -Shifting token '-' (9.13: ) -Entering state 2 -Reading a token -Next token is token "number" (9.14: 4) -Shifting token "number" (9.14: 4) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (9.14: 4) --> $$ = nterm exp (9.14: 4) -Entering state 10 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (9.13: ) - $2 = nterm exp (9.14: 4) --> $$ = nterm exp (9.13-14: -4) -Entering state 27 -Next token is token '\n' (9.15-10.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (9.1-9: -4) - $2 = token '=' (9.11: ) - $3 = nterm exp (9.13-14: -4) --> $$ = nterm exp (9.1-14: -4) -Entering state 8 -Next token is token '\n' (9.15-10.0: ) -Shifting token '\n' (9.15-10.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (9.1-14: -4) - $2 = token '\n' (9.15-10.0: ) --> $$ = nterm line (9.1-10.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-9.0: ) - $2 = nterm line (9.1-10.0: ) --> $$ = nterm input (1.1-10.0: ) -Entering state 6 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (10.1: 1) -Shifting token "number" (10.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.1: 1) --> $$ = nterm exp (10.1: 1) +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '-' (10.3: ) -Shifting token '-' (10.3: ) -Entering state 19 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (10.5: ) -Shifting token '(' (10.5: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading 
a token -Next token is token "number" (10.6: 2) -Shifting token "number" (10.6: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.6: 2) --> $$ = nterm exp (10.6: 2) -Entering state 12 -Reading a token -Next token is token '-' (10.8: ) -Shifting token '-' (10.8: ) -Entering state 19 -Reading a token -Next token is token "number" (10.10: 3) -Shifting token "number" (10.10: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.10: 3) --> $$ = nterm exp (10.10: 3) -Entering state 28 -Reading a token -Next token is token ')' (10.11: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.6: 2) - $2 = token '-' (10.8: ) - $3 = nterm exp (10.10: 3) --> $$ = nterm exp (10.6-10: -1) -Entering state 12 -Next token is token ')' (10.11: ) -Shifting token ')' (10.11: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (10.5: ) - $2 = nterm exp (10.6-10: -1) - $3 = token ')' (10.11: ) --> $$ = nterm exp (10.5-11: -1) -Entering state 28 -Reading a token -Next token is token '=' (10.13: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (10.1: 1) - $2 = token '-' (10.3: ) - $3 = nterm exp (10.5-11: -1) --> $$ = nterm exp (10.1-11: 2) -Entering state 8 -Next token is token '=' (10.13: ) -Shifting token '=' (10.13: ) -Entering state 18 -Reading a token -Next token is token "number" (10.15: 2) -Shifting token "number" (10.15: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (10.15: 2) --> $$ = nterm exp (10.15: 2) -Entering state 27 -Reading a token -Next token is token '\n' (10.16-11.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (10.1-11: 2) - $2 = token '=' (10.13: ) - $3 = nterm exp (10.15: 2) --> $$ = nterm exp (10.1-15: 2) -Entering state 8 -Next token is token '\n' (10.16-11.0: ) -Shifting token '\n' (10.16-11.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (10.1-15: 2) - $2 = token '\n' (10.16-11.0: ) --> $$ = nterm line (10.1-11.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-10.0: ) - $2 = nterm line (10.1-11.0: ) --> $$ = nterm input (1.1-11.0: ) -Entering state 6 -Reading a token -Next token is token '\n' (11.1-12.0: ) -Shifting token '\n' (11.1-12.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (11.1-12.0: ) --> $$ = nterm line (11.1-12.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-11.0: ) - $2 = nterm line (11.1-12.0: ) --> $$ = nterm input (1.1-12.0: ) -Entering state 6 -Reading a token -Next token is token "number" (12.1: 2) -Shifting token "number" (12.1: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.1: 2) --> $$ = nterm exp (12.1: 2) -Entering state 8 -Reading a token -Next token is token '^' (12.2: ) -Shifting token '^' (12.2: ) -Entering state 23 -Reading a token -Next token is token "number" (12.3: 2) -Shifting token "number" (12.3: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.3: 2) --> $$ = nterm exp (12.3: 2) -Entering state 32 -Reading a token -Next token is token '^' (12.4: ) -Shifting token '^' (12.4: ) -Entering state 23 -Reading a token -Next token is token "number" (12.5: 3) -Shifting token "number" (12.5: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.5: 3) --> $$ = nterm exp (12.5: 3) -Entering state 32 -Reading a token -Next token is token '=' (12.7: ) -Reducing 
stack 0 by rule 12 (line 103): - $1 = nterm exp (12.3: 2) - $2 = token '^' (12.4: ) - $3 = nterm exp (12.5: 3) --> $$ = nterm exp (12.3-5: 8) -Entering state 32 -Next token is token '=' (12.7: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (12.1: 2) - $2 = token '^' (12.2: ) - $3 = nterm exp (12.3-5: 8) --> $$ = nterm exp (12.1-5: 256) -Entering state 8 -Next token is token '=' (12.7: ) -Shifting token '=' (12.7: ) -Entering state 18 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (12.9-11: 256) -Shifting token "number" (12.9-11: 256) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (12.9-11: 256) --> $$ = nterm exp (12.9-11: 256) -Entering state 27 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) -Entering state 6 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (13.2: 2) -Shifting token "number" (13.2: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.2: 2) --> $$ = nterm exp (13.2: 2) -Entering state 12 -Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 -Reading a token -Next token is token "number" (13.4: 2) -Shifting token "number" (13.4: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 -Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) -Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) -Entering state 8 -Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 -Reading a token -Next token is token "number" (13.7: 3) -Shifting token "number" (13.7: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 -Reading 
a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 +Next token is token '*' () +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token "number" (13.11-12: 64) -Shifting token "number" (13.11-12: 64) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 118): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) - $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 82): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (14.1: ) +Shifting token end of file () Entering state 16 -Cleanup: popping token "end of input" (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1489: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -./calc.at:1491: cat stderr - | 1 2 -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1486: cat stderr -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./calc.at:1487: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -input: - | 1 = 2 = 3 -input: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1491: $PREPARSER ./calc input +Cleanup: popping token end of file () +Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +memory exhausted +./torture.at:504: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | (1 + 1) / (1 - 1) -./calc.at:1487: $PREPARSER ./calc input -stderr: -input: -stderr: stderr: +./calc.at:1489: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -220146,290 +220765,66 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -stderr: - | (* *) + (*) + (*) -./calc.at:1486: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '*' (1.39: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Reading a token -Next token is token '=' (1.44: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +stderr: +input: +stderr: +input: +stderr: Starting parse Entering state 0 Reading a token @@ -220440,7 +220835,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -220452,13 +220847,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -220467,7 +220862,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (2) $3 = token ')' () @@ -220485,7 +220880,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -220497,13 +220892,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (1) @@ -220512,7 +220907,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (0) $3 = token ')' () @@ -220520,7 +220915,7 @@ Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): +Reducing stack 0 by rule 10 (line 93): $1 = nterm exp (2) $2 = token '/' () $3 = nterm exp (0) @@ -220530,36 +220925,23 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -stderr: +./torture.at:541: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -220568,7 +220950,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) Shifting token error () Entering state 11 Next token is token '*' () @@ -220582,7 +220964,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -220598,7 +220980,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) Shifting token error () Entering state 11 Next token is token '*' () @@ -220609,7 +220991,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -220617,7 +220999,7 @@ Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -220632,7 +221014,7 @@ Entering state 4 Reading a token Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error on token ['*'] (expected: [number] ['-'] ['('] ['!']) Shifting token error () Entering state 11 Next token is token '*' () @@ -220643,7 +221025,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -220651,7 +221033,7 @@ Entering state 29 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (2222) $2 = token '+' () $3 = nterm exp (1111) @@ -220660,273 +221042,96 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (3333) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1491: $PREPARSER ./calc input + | (- *) + (1 2) = 1 +./calc.at:1486: $PREPARSER ./calc input +memory exhausted +memory exhausted +stderr: +stderr: +stderr: +./torture.at:541: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) Entering state 21 Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) Entering state 30 Reading a token -Next token is token '*' (1.39: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) Entering state 29 -Reading a token -Next token is token '=' (1.44: ) +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stdout: -stderr: -stderr: +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) Starting parse Entering state 0 Reading a token @@ -220937,7 +221142,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -220949,13 +221154,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 103): +Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -220964,7 +221169,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (2) $3 = token ')' () @@ -220982,7 +221187,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -220994,13 +221199,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 92): +Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 104): +Reducing stack 0 by rule 8 (line 91): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (1) @@ -221009,7 +221214,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 117): +Reducing stack 0 by rule 13 (line 104): $1 = token '(' () $2 = nterm exp (0) $3 = token ')' () @@ -221017,7 +221222,7 @@ Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 106): +Reducing stack 0 by rule 10 (line 93): $1 = nterm exp (2) $2 = token '/' () $3 = nterm exp (0) @@ -221027,61 +221232,23 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 88): +Reducing stack 0 by rule 4 (line 75): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 82): +Reducing stack 0 by rule 1 (line 69): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of file () +Shifting token end of input () Entering state 16 -Cleanup: popping token end of file () +Cleanup: popping token end of input () Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 27 -Reading a token -Next token is token '=' () -syntax error, unexpected '=' -Error: popping nterm exp (2) -Error: popping token '=' () -Error: popping nterm exp (1) -Cleanup: discarding lookahead token '=' () -./calc.at:1494: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - +./torture.at:510: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -221090,16 +221257,22 @@ Shifting token '(' () Entering state 4 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () @@ -221120,12 +221293,21 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token number (2) +Error: discarding token number (2) Reading a token Next token is token ')' () Entering state 11 @@ -221139,24 +221321,165 @@ -> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '+' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) -> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2222) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' 
(1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./torture.at:545: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 +stderr: +stderr: +./calc.at:1489: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +memory exhausted +memory exhausted +Starting parse +Entering state 0 +Reading a token Next token is token '(' () Shifting token '(' () Entering state 4 Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 2 +Reading a token Next token is token '*' () syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error () +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' () + $2 = token error () +Shifting token error () Entering state 11 Next token is token '*' () Error: discarding token '*' () @@ -221171,20 +221494,76 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 +Reading a token +Next token is token number (2) +syntax error, unexpected number +Error: popping nterm exp (1) +Shifting token error () +Entering state 11 +Next token is token number (2) +Error: discarding token number (2) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '\n' () +Next token is token '=' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) + $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2222) + $2 = token '=' () + $3 = nterm exp (1) +error: 2222 != 1 +-> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm 
line () Entering state 7 @@ -221198,7 +221577,132 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1491: "$PERL" -pi -e 'use strict; +./torture.at:545: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !+ ++ +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1491: cat stderr +stderr: +./calc.at:1489: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +stderr: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +memory exhausted +memory exhausted +stderr: +input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] +input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./torture.at:548: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +564. calc.at:1489: ok +stdout: +input: + | (#) + (#) = 2222 +./calc.at:1492: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc calc.hh + +stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -221208,10 +221712,172 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1491: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 14 +Reducing stack 0 by rule 17 (line 121): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: input: - | 1//2 -./calc.at:1492: $PREPARSER ./calc input input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is 
token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -221225,45 +221891,183 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1494: $PREPARSER ./calc input +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 + | 1 + 2 * 3 + !- ++ +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:510: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1486: cat stderr stderr: -./calc.at:1487: cat stderr + Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1491: cat stderr -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Next token is token number (2) +Shifting token number (2) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 +Reading a token +Next token is token '*' () +Shifting token '*' () +Entering state 21 +Reading a token +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +stderr: +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: 
) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./torture.at:548: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: stderr: -input: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (!!) + (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -222100,272 +222904,74 @@ Entering state 16 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -562. calc.at:1487: ok -stderr: +input: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1486: cat stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1489: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' (1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token number (3) +Shifting token number (3) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 105): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token 
'+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) +Next token is token '+' () +Reducing stack 0 by rule 7 (line 103): + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | 1 + 2 * 3 + !+ ++ +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 122): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) + | (* *) + (*) + (*) ./calc.at:1486: $PREPARSER ./calc input -stderr: -input: ./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -222376,7 +222982,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: +./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token @@ -222636,47 +223245,162 @@ $3 = nterm exp (4.8-9: -1) -> $$ = nterm exp (4.1-9: -1) Entering state 8 -Next token is token '\n' (4.10-5.0: ) -Shifting token '\n' (4.10-5.0: ) +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (Starting parse +Entering state 0 +Reading a 
token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '+' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1111) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) +Entering state 8 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () +Entering state 11 +Next token is token '*' () +Error: discarding token '*' () +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 29 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4.1-9: -1) - $2 = token '\n' (4.10-5.0: ) --> $$ = nterm line (4.1-5.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-4.0: ) - $2 = nterm line (4.1-5.0: ) --> $$ = nterm input (1.1-5.0: ) + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token -Next token is token '(' (5.1: ) -Shifting token '(' (5.1: ) -Entering state 4 -Reading a token -Next token is token '-' (5.2: ) -Shifting token '-' (5.2: ) -Entering state 2 -Reading a token -Next token is token "number" (5.3: 1) -Shifting token "number" (5.3: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (5.3: 1) --> $$ = nterm exp (5.3: 1) -Entering state 10 -Reading a token -Next token is token ')' (5.4: ) -Reducing stack 0 by rule 11 (line 102): - $1 = token '-' (5.2: ) - $2 = nterm exp (5.3: 1) --> $$ = nterm exp (5.2-3: -1) -Entering state 12 -Next token is token ')' (5.4: ) -Shifting token ')' (5.4: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (5.1: ) +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +5.1: ) $2 = nterm exp (5.2-3: -1) $3 = token ')' (5.4: ) -> $$ = nterm exp (5.1-4: -1) @@ -223213,872 +223937,135 @@ Entering state 16 Cleanup: popping token "end of input" (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) - | - | +1 -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -stderr: -./calc.at:1491: cat stderr ./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -./calc.at:1492: cat stderr -input: -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 2 -./calc.at:1494: $PREPARSER ./calc input -stdout: -./torture.at:513: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 input: +./calc.at:1487: cat stderr input: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' () -Shifting token '\n' () -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Next token is token '+' () -syntax error, unexpected '+' -Error: popping nterm input () -Cleanup: discarding lookahead token '+' () -stderr: -stderr: 
-Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) - | (- *) + (1 2) = 1 - | error + | 1 2 + | (1 + #) = 1111 ./calc.at:1492: $PREPARSER ./calc input ./calc.at:1491: $PREPARSER ./calc input -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./torture.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -input: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token "number" (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token "number" (1.3: 2) - | 1 + 2 * 3 + !- ++ -./torture.at:515: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.5: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error () Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.13: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:515: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -stderr: -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) + $1 = nterm exp (1111) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1111) +-> $$ = nterm exp (2222) Entering state 8 Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' () - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./torture.at:517: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 -./calc.at:1491: cat stderr -stderr: -stderr: -./calc.at:1489: cat stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -memory exhausted -memory exhausted -./torture.at:517: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1494: cat stderr -input: -stderr: -memory exhausted -memory exhausted -./torture.at:551: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 - | (* *) + (*) + (*) -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1489: $PREPARSER ./calc /dev/null -./calc.at:1492: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -614. torture.at:485: ok -stderr: -./torture.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1//2 -stderr: -./calc.at:1494: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) +Next token is token '*' () +syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error () Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '*' () +Error: discarding token '*' () Reading a token -Next token is token ')' (1.17: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 29 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) + $1 = nterm exp (2222) + $2 = token '+' () + $3 = nterm exp (1111) +-> $$ = nterm exp (3333) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (3333) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -618. existing.at:74: testing GNU AWK 3.1.0 Grammar: Canonical LR(1) ... -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () stderr: -./torture.at:553: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 -./calc.at:1486: cat stderr input: stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input () -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) - | 1 = 2 = 3 -./calc.at:1492: $PREPARSER ./calc input -stderr: -stderr: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +617. existing.at:74: testing GNU AWK 3.1.0 Grammar: IELR(1) ... 
+./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y Starting parse Entering state 0 Reading a token @@ -224086,102 +224073,64 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -224194,45 +224143,8 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./torture.at:553: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (#) + (#) = 2222 -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -stderr: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./calc.at:1487: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -224244,17 +224156,12 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: +Cleanup: discarding lookahead token "number" (1.3: 2) stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -224274,7 +224181,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -224301,7 +224208,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -224309,7 +224216,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -224322,13 +224229,13 @@ Next token is token number (2222) Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2222) -> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (2222) @@ -224337,31 +224244,25 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -224373,53 +224274,88 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./torture.at:555: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -memory exhausted -memory exhausted -./torture.at:555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -./calc.at:1494: cat stderr -./calc.at:1491: cat stderr +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) Starting parse Entering state 0 Reading a token @@ -224439,7 +224375,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -224466,7 +224402,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -224474,7 +224410,7 @@ Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1111) $2 = token '+' () $3 = nterm exp (1111) @@ -224487,13 +224423,13 @@ Next token is token number (2222) Shifting token number (2222) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (2222) -> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (2222) $2 = token '=' () $3 = nterm exp (2222) @@ -224502,27 +224438,22 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2222) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -stderr: -memory exhausted -memory exhausted -./calc.at:1489: cat stderr -615. torture.at:531: ok -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -224532,10 +224463,17 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -input: -input: -./calc.at:1486: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -224545,16 +224483,25 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1492: cat stderr +./calc.at:1487: cat stderr +./calc.at:1486: cat stderr +./calc.at:1491: cat stderr input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1489: $PREPARSER ./calc input - | error -./calc.at:1494: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1491: $PREPARSER ./calc input input: + | (1 + #) = 1111 +input: + | 1//2 +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1492: $PREPARSER ./calc input + | (# + 1) = 1111 +input: +./calc.at:1491: $PREPARSER ./calc input + | 1 + 2 * 3 + !+ ++ +./calc.at:1486: $PREPARSER ./calc input +stderr: +stderr: stderr: -./calc.at:1486: cat stderr Starting parse Entering state 0 Reading a token @@ -224562,584 +224509,133 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 Reading a token Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+syntax error: invalid character: '#' +Next token is token error () Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 Reading a token Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) +Reducing stack 0 by rule 5 (line 92): + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) +Reducing stack 0 by rule 6 (line 93): + $1 = nterm exp (1111) $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) +Reducing stack 0 by rule 4 (line 88): + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | - | +1 -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) - -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (3) -Entering state 12 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token ')' () -syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' () -Error: popping nterm exp (3) -Shifting token error () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token '*' () -Error: discarding token '*' () +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token ')' () +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (3333) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 30 -Reading a token -Next token is token '*' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1) - $2 = token '*' () - $3 = nterm exp (2) --> $$ = nterm exp (2) -Entering state 12 -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' () -Error: popping nterm exp (2) -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (3333) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (4444) -Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (4444) - $2 = token '=' () - $3 = nterm exp (1) -error: 4444 != 1 --> $$ = nterm exp (4444) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (4444) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -225147,21 +224643,13 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -619. existing.at:808: testing GNU Cim Grammar: LALR(1) ... 
-./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -input: -Starting parse -Entering state 0 -Reading a token -Next token is token "invalid token" (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token "invalid token" (1.1: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -225173,65 +224661,84 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +Starting parse +Entering state 0 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) +Next token is token '!' () +Shifting token '!' () Entering state 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) +Next token is token '+' () +Shifting token '+' () Entering state 14 Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (1 + #) = 1111 -./calc.at:1486: $PREPARSER ./calc input -input: + $1 = token '!' 
() + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - | 1 + 2 * 3 + !- ++ stderr: Starting parse Entering state 0 @@ -225243,7 +224750,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -225266,7 +224773,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -225280,13 +224787,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -225295,144 +224802,26 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) stderr: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token Next token is token "number" (1.1: 1) Shifting token "number" (1.1: 1) Entering state 1 @@ -225441,271 +224830,153 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' 
(1.13: ) -Entering state 5 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1489: cat stderr -./calc.at:1494: cat stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: -stderr: -./calc.at:1492: cat stderr +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) + $1 = token number (2) +-> $$ = nterm exp (2) Entering state 29 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) +Next token is token '*' () +Shifting token '*' () Entering state 21 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) + $1 = token number (3) +-> $$ = nterm exp (3) Entering state 30 Reading a token -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Next token is token '+' (1.11: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '!' 
(1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) - | (!!) + (1 2) = 1 -input: -./calc.at:1489: $PREPARSER ./calc input - | 1 = 2 = 3 -./calc.at:1494: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token Next token is token '!' () Shifting token '!' () Entering state 5 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' () -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token Next token is token '+' () Shifting token '+' () -Entering state 20 +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' () + $2 = token '+' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +Starting parse +Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' () +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' () +Next 
token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: + | 1 + 2 * 3 + !- ++ +./calc.at:1486: $PREPARSER ./calc input stderr: -./calc.at:1492: $PREPARSER ./calc /dev/null ./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -225716,120 +224987,86 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' () - $2 = token '!' 
() -Shifting token error () -Entering state 11 -Reading a token -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 8 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token number (3) +Shifting token number (3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token '=' () +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) +Entering state 29 +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) + $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' () -Shifting token '=' () -Entering state 18 +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +Next token is token '!' () +Shifting token '!' () +Entering state 5 +Reading a token +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' () + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1487: cat stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 Reading a token Next token is token number (1) Shifting token number (1) @@ -225837,193 +225074,70 @@ Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 --> $$ = nterm exp (2222) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr -620. existing.at:808: testing GNU Cim Grammar: IELR(1) ... -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1486: cat stderr -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -input: -stderr: -stderr: - | (#) + (#) = 2222 -input: -Starting parse -Entering state 0 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token number (2) +Shifting token number (2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token number (2) +-> $$ = nterm exp (2) +Entering state 29 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 +Next token is token '*' () +Shifting token '*' () +Entering state 21 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token number (3) +Shifting token number (3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1491: $PREPARSER ./calc input - | (# + 1) = 1111 -stderr: -./calc.at:1486: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) + $1 = token number (3) +-> $$ = nterm exp (3) +Entering state 30 Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) +Next token is token '+' () +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2) + $2 = token '*' () + $3 = nterm exp (3) +-> $$ = nterm exp (6) Entering state 29 -Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (6) +-> $$ = nterm exp (7) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 +Next token is token '+' () +Shifting token '+' () +Entering state 20 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '!' () +Shifting token '!' () +Entering state 5 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '-' () +Shifting token '-' () +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
() + $2 = token '-' () +Cleanup: popping token '+' () +Cleanup: popping nterm exp (7) +./calc.at:1491: cat stderr +input: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none + | (# + 1) = 1111 +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1492: cat stderr stderr: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +input: Starting parse Entering state 0 Reading a token @@ -226049,7 +225163,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -226063,13 +225177,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -226078,34 +225192,25 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + # + 1) = 1111 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -226115,10 +225220,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: cat stderr -stderr: -./calc.at:1494: cat stderr stderr: +input: Starting parse Entering state 0 Reading a token @@ -226144,7 +225247,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -226158,13 +225261,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -226173,21 +225276,25 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () +./calc.at:1486: cat stderr + | error +stderr: +./calc.at:1492: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -226195,84 +225302,70 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) 
+Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.9: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -226285,26 +225378,20 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: cat stderr -input: - | (- *) + (1 2) = 1 -input: -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | - | +1 -./calc.at:1494: $PREPARSER ./calc input stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) input: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -226312,21 +225399,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -226348,21 +225426,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -226387,20 +225456,19 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 + $3 = nterm exp (2222) -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -226421,42 +225489,8 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1492: $PREPARSER ./calc input -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -stderr: +./calc.at:1487: cat stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -226464,234 +225498,70 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.23: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) Reading a token -Next token is token ')' (1.28: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax 
error, unexpected '*', expecting number or '-' or '(' or '!' -Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -226704,8 +225574,24 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1486: cat stderr -input: +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -226713,21 +225599,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 2 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' () - $2 = token error () +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -226749,21 +225626,12 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token number (2) -syntax error, unexpected number -Error: popping nterm exp (1) +syntax error: invalid character: '#' +Next token is token error () Shifting token error () Entering state 11 -Next token is token number (2) -Error: discarding token number (2) +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -226788,20 +225656,19 @@ Shifting token '=' () Entering state 18 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token number (2222) +Shifting token number (2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token number (2222) +-> $$ = nterm exp (2222) Entering state 27 Reading a token Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): $1 = nterm exp (2222) $2 = token '=' () - $3 = nterm exp (1) -error: 2222 != 1 + $3 = nterm exp (2222) -> $$ = nterm exp (2222) Entering state 8 Next token is token '\n' () @@ -226822,361 +225689,21 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () - | (1 + #) = 1111 -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: | (1 + # + 1) = 1111 +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1491: cat stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./calc.at:1486: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) -Entering state 4 -Reading a token -Next token is token "number" (1.7: 1) -Shifting token "number" (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) -Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token "number" (1.11: 1) -Shifting token "number" (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token "number" (1.15: 1) -Shifting token "number" (1.15: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) -Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) -Entering state 20 -Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) -Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) -Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) -Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) -Reading a token -Next token is token ')' (1.28: ) -Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) -Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token "number" (1.33: 1) -Shifting token "number" (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token "number" (1.37: 2) -Shifting token "number" (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) -Entering state 18 -Reading a token -Next token is token "number" (1.46: 1) -Shifting token "number" (1.46: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.46: 1) --> $$ = nterm exp (1.46: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.47-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) -Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -stderr: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -227187,7 +225714,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -227216,7 +225743,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -227230,13 +225757,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -227245,112 +225772,29 @@ Next token is token '\n' () 
Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: cat stderr +input: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror + | (1 + 1) / (1 - 1) +./calc.at:1491: $PREPARSER ./calc input stderr: -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) stderr: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 Starting parse Entering state 0 Reading a token @@ -227361,7 +225805,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -227390,7 +225834,7 @@ Next token is token ')' () Shifting token ')' () Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Reducing stack 0 by rule 14 (line 118): $1 = token '(' () $2 = token error () $3 = token ')' () @@ -227404,13 +225848,13 @@ Next token is token number (1111) Shifting token number (1111) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1111) -> $$ = nterm exp (1111) Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): +Reducing stack 0 by rule 6 (line 93): $1 = nterm exp (1111) $2 = token '=' () $3 = nterm exp (1111) @@ -227419,58 +225863,21 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1489: cat stderr -./calc.at:1492: cat stderr -./calc.at:1491: cat stderr -./calc.at:1494: cat stderr -./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -input: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y ./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -227481,17 +225888,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./torture.at:140: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -input: -input: - | (* *) + (*) + (*) -./calc.at:1489: $PREPARSER ./calc input - | (!!) 
+ (1 2) = 1 - | (# + 1) = 1111 -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1492: $PREPARSER ./calc input -stderr: -./calc.at:1494: $PREPARSER ./calc /dev/null Starting parse Entering state 0 Reading a token @@ -227499,56 +225895,102 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 Reading a token Next token is token ')' (1.7: ) -Entering state 11 +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) + $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp 
(1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -227561,11 +226003,43 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: +input: + | 1 = 2 = 3 +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1486: cat stderr stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) stderr: -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 +./calc.at:1487: cat stderr Starting parse Entering state 0 Reading a token @@ -227573,75 +226047,62 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 Reading a token Next token is token "number" (1.16: 1) Shifting token "number" (1.16: 1) @@ -227649,22 +226110,39 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 27 +Entering state 28 Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + 
$3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -227677,7 +226155,13 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (1 + #) = 1111 +./calc.at:1486: $PREPARSER ./calc input +stderr: +input: +stderr: Starting parse Entering state 0 Reading a token @@ -227685,42 +226169,26 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -227732,54 +226200,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) -> $$ = nterm exp (1111) -Entering state 29 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -227793,20 +226239,48 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () + | (1 + 1) / (1 - 1) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | (1 + 1) / (1 - 1) -./calc.at:1486: $PREPARSER ./calc input -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1487: $PREPARSER ./calc input +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: Starting parse Entering state 0 @@ -227815,42 +226289,26 @@ Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token '*' () -Error: discarding token '*' () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) Shifting token error () Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token error () +Error: discarding token error () Reading a token Next token is token ')' () Entering state 11 @@ -227862,54 +226320,32 @@ $2 = token error () $3 = token ')' () -> $$ = nterm exp (1111) -Entering state 29 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token '*' () -syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error () -Entering state 11 -Next token is token '*' () -Error: discarding token '*' () +Next token is token '=' () +Shifting token '=' () +Entering state 18 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) -> $$ = nterm exp (1111) -Entering state 29 +Entering state 27 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (2222) - $2 = token '+' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () $3 = nterm exp (1111) --> $$ = nterm exp (3333) +-> $$ = nterm exp (1111) Entering state 8 Next token is token '\n' () Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (3333) + $1 = nterm exp (1111) $2 = token '\n' () -> $$ = nterm line () Entering state 7 @@ -227923,76 +226359,8 @@ Entering state 16 Cleanup: popping token end of input () Cleanup: popping nterm input () -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) stderr: +./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token @@ -228003,7 +226371,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -228015,13 +226383,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -228030,7 +226398,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (2) $3 = token ')' () @@ -228048,7 +226416,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -228060,13 +226428,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (1) @@ -228075,7 +226443,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (0) $3 = token ')' () @@ -228083,7 +226451,7 @@ Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): +Reducing stack 0 by rule 10 (line 106): $1 = nterm exp (2) $2 = token '/' () $3 = nterm exp (0) @@ -228093,144 +226461,23 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) -Entering state 20 -Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) -Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.14: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) -Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) -Entering state 18 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token "end of input" (1.1: ) -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; +./calc.at:1487: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -228240,6 +226487,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +565. 
calc.at:1491: ok +stderr: Starting parse Entering state 0 Reading a token @@ -228250,7 +226499,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -228262,13 +226511,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 29 Reading a token Next token is token ')' () -Reducing stack 0 by rule 7 (line 90): +Reducing stack 0 by rule 7 (line 103): $1 = nterm exp (1) $2 = token '+' () $3 = nterm exp (1) @@ -228277,7 +226526,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (2) $3 = token ')' () @@ -228295,7 +226544,7 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 12 @@ -228307,13 +226556,13 @@ Next token is token number (1) Shifting token number (1) Entering state 1 -Reducing stack 0 by rule 5 (line 79): +Reducing stack 0 by rule 5 (line 92): $1 = token number (1) -> $$ = nterm exp (1) Entering state 28 Reading a token Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): +Reducing stack 0 by rule 8 (line 104): $1 = nterm exp (1) $2 = token '-' () $3 = nterm exp (1) @@ -228322,7 +226571,7 @@ Next token is token ')' () Shifting token ')' () Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Reducing stack 0 by rule 13 (line 117): $1 = token '(' () $2 = nterm exp (0) $3 = token ')' () @@ -228330,7 +226579,7 @@ Entering state 31 Reading a token Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): +Reducing stack 0 by rule 10 (line 106): $1 = nterm exp (2) $2 = token '/' () $3 = nterm exp (0) @@ -228340,22 +226589,23 @@ Next token is token '\n' () Shifting token '\n' () Entering state 24 -Reducing stack 0 by rule 4 (line 75): +Reducing stack 0 by rule 4 (line 88): $1 = nterm exp (2) $2 = token '\n' () -> $$ = nterm line () Entering state 7 -Reducing stack 0 by rule 1 (line 69): +Reducing stack 0 by rule 1 (line 82): $1 = nterm line () -> $$ = nterm input () Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token end of file () Entering state 16 -Cleanup: popping token end of input () +Cleanup: popping token end of file () Cleanup: popping nterm input () -./calc.at:1492: "$PERL" -pi -e 'use strict; +./calc.at:1492: cat stderr +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -228365,7 +226615,200 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1489: "$PERL" -pi -e 'use strict; +input: +./calc.at:1486: cat stderr + | + | +1 +./calc.at:1492: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1487: cat stderr + +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1486: $PREPARSER ./calc input +562. calc.at:1487: ok +stderr: +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Shifting token error () +Entering state 11 +Next token is token error () +Error: discarding token error () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () +Entering state 11 +Next token is token ')' () +Shifting token ')' () +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 18 +Reading a token +Next token is token number (1111) +Shifting token number (1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1111) +-> $$ = nterm exp (1111) +Entering state 27 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -228375,8 +226818,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: cat stderr -./calc.at:1494: "$PERL" -pi -e 'use strict; + +./calc.at:1492: cat stderr +./calc.at:1492: $PREPARSER ./calc /dev/null +stderr: +stderr: +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -228386,11 +226833,33 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1492: cat stderr -./torture.at:237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1494: cat stderr -./calc.at:1489: cat stderr -./calc.at:1486: "$PERL" -pi -e 'use strict; +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stdout: +./torture.at:551: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +618. existing.at:74: testing GNU AWK 3.1.0 Grammar: Canonical LR(1) ... +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./calc.at:1486: cat stderr +./torture.at:551: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) +stderr: +input: + | (1 + # + 1) = 1111 +./calc.at:1486: $PREPARSER ./calc input +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -228400,226 +226869,228 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: -input: - | (1 + # + 1) = 1111 -./calc.at:1491: $PREPARSER ./calc input - | (- *) + (1 2) = 1 -./calc.at:1492: $PREPARSER ./calc input +./torture.at:553: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 stderr: -./calc.at:1486: cat stderr -input: -input: stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token number (1) +Error: discarding token number (1) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' () Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = 
nterm exp (1.1-11: 1111) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:553: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: cat stderr +stderr: +stderr: +stderr: +input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 65 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for 
LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] +input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./torture.at:555: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 +619. existing.at:808: testing GNU Cim Grammar: LALR(1) ... +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 12 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) +Next token is token '+' () +Shifting token '+' () +Entering state 20 +Reading a token +syntax error: invalid character: '#' +Next token is token error () +Error: popping token '+' () +Error: popping nterm exp (1) +Shifting token error () Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token error () +Error: discarding token error () Reading a token -Next token is token ')' (1.13: ) +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token number (1) +Error: discarding token number (1) +Reading a token +Next token is token ')' () Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) +Next token is token ')' () +Shifting token ')' () Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) + $1 = token '(' () + $2 = token error () + $3 = token ')' () +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Reading a token +Next token is token '=' () +Shifting token '=' () Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token number (1111) +Shifting token number (1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token number (1111) +-> $$ = nterm exp (1111) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' () Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (1111) + $2 = token '=' () + $3 = nterm exp (1111) +-> $$ = nterm exp (1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) 
+Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (1111) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +input: +memory exhausted +memory exhausted +./torture.at:555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1494: $PREPARSER ./calc input - | 1 + 2 * 3 + !+ ++ -./calc.at:1489: $PREPARSER ./calc input -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -561. calc.at:1486: ok +./calc.at:1492: $PREPARSER ./calc input +stderr: +stdout: +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 stderr: stderr: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./torture.at:513: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 20 Starting parse Entering state 0 Reading a token @@ -228867,345 +227338,11 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
() - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' () - $2 = token '+' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !- ++ -./calc.at:1489: $PREPARSER ./calc input +memory exhausted +memory exhausted stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; +615. 
torture.at:531: ok +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -229215,73 +227352,12 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./torture.at:513: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./calc.at:1486: cat stderr +stderr: stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token number (2) -Shifting token number (2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) -Entering state 29 -Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) -Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 -Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() - $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -229529,7 +227605,14 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; +./torture.at:515: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 900 +stderr: +input: +./torture.at:515: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + + | (1 + 1) / (1 - 1) +./calc.at:1486: $PREPARSER ./calc input +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -229539,211 +227622,254 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - -./calc.at:1492: cat stderr +stderr: stderr: Starting parse Entering state 0 Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token Next token is token number (1) Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): $1 = token number (1) -> $$ = nterm exp (1) -Entering state 8 +Entering state 12 Reading a token Next token is token '+' () Shifting token '+' () Entering state 20 Reading a token -Next token is token number (2) -Shifting token number (2) +Next token is token number (1) +Shifting token number (1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2) --> $$ = nterm exp (2) + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '*' () -Shifting token '*' () -Entering state 21 -Reading a token -Next token is token number (3) -Shifting token number (3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (3) --> $$ = nterm exp (3) -Entering state 30 -Reading a token -Next token is token '+' () -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (2) - $2 = token '*' () - $3 = nterm exp (3) --> $$ = nterm exp (6) -Entering state 29 -Next token is token '+' () +Next token is token ')' () Reducing stack 0 by rule 7 (line 90): $1 = nterm exp (1) $2 = token '+' () - $3 = nterm exp (6) --> $$ = nterm exp (7) + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 -Next token is token '+' () -Shifting token '+' () -Entering state 20 Reading a token -Next token is token '!' () -Shifting token '!' () -Entering state 5 +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 4 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token Next token is token '-' () Shifting token '-' () -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
() +Entering state 19 +Reading a token +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) $2 = token '-' () -Cleanup: popping token '+' () -Cleanup: popping nterm exp (7) -./calc.at:1491: cat stderr -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: - | (* *) + (*) + (*) -input: -./calc.at:1492: $PREPARSER ./calc input - | (1 + 1) / (1 - 1) -./calc.at:1494: cat stderr -./calc.at:1491: $PREPARSER ./calc input + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) +Entering state 8 +Next token is token '\n' () +Shifting token '\n' () +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line () +-> $$ = nterm input () +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input () +Entering state 16 +Cleanup: popping token end of input () +Cleanup: popping nterm input () +./calc.at:1492: cat stderr +./calc.at:1486: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./torture.at:517: VALGRIND_OPTS="$VALGRIND_OPTS --log-fd=1" $PREPARSER ./input 10000 stderr: stderr: -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +memory exhausted +memory exhausted +./torture.at:517: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) +Next token is token '+' () +Shifting token '+' () Entering state 20 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) Entering state 29 Reading a token -Next token is token '+' (1.13: ) +Next token is token ')' () Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) + $1 = nterm exp (1) + $2 = token '+' () + $3 = nterm exp (1) +-> $$ = nterm exp (2) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (2) + $3 = token ')' () +-> $$ = nterm exp (2) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) +Next token is token '/' () +Shifting token '/' () +Entering state 22 +Reading a token +Next token is token '(' () +Shifting token '(' () Entering state 4 Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 12 Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 +Next token is token '-' () +Shifting token '-' () +Entering state 19 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) +Next token is token number (1) +Shifting token number (1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1) +-> $$ = nterm exp (1) +Entering state 28 +Reading a token +Next token is token ')' () +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1) + $2 = token '-' () + $3 = nterm exp (1) +-> $$ = nterm exp (0) +Entering state 12 +Next token is token ')' () +Shifting token ')' () +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' () + $2 = nterm exp (0) + $3 = token ')' () +-> $$ = nterm exp (0) +Entering state 31 +Reading a token +Next token is token '\n' () +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (2) + $2 = token '/' () + $3 = nterm exp (0) +error: null divisor +-> $$ = nterm exp (2) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' () +Shifting token '\n' () Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) + $1 = nterm exp (2) + $2 = token '\n' () +-> $$ = nterm line () Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm line () +-> $$ = nterm input () Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token end of input () Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token end of input () +Cleanup: popping nterm input () +stderr: +input: +memory exhausted +memory exhausted + | (!!) + (1 2) = 1 +614. torture.at:485: ok +./calc.at:1492: $PREPARSER ./calc input +stderr: Starting parse Entering state 0 Reading a token @@ -229751,62 +227877,75 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 +Next token is token '!' (1.3: ) +Shifting token '!' 
(1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 Reading a token Next token is token "number" (1.16: 1) Shifting token "number" (1.16: 1) @@ -229814,39 +227953,22 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1.16: 1) -> $$ = nterm exp (1.16: 1) -Entering state 28 +Entering state 27 Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) 
-1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -229859,7 +227981,7 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1486: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -229869,16 +227991,10 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input: - | (!!) + (1 2) = 1 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1489: cat stderr -input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1486: cat stderr + stderr: -stdout: Starting parse Entering state 0 Reading a token @@ -229990,9 +228106,282 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +561. calc.at:1486: ok +620. existing.at:808: testing GNU Cim Grammar: IELR(1) ... +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +./calc.at:1492: cat stderr +input: + | (- *) + (1 2) = 1 +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 +./calc.at:1492: $PREPARSER ./calc input +stderr: +621. existing.at:808: testing GNU Cim Grammar: Canonical LR(1) ... +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y stderr: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 +Reading a token +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) +Entering state 11 +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) +Entering state 18 +Reading a token +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp 
(1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +622. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: LALR(1) ... +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./calc.at:1492: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1492: $PREPARSER ./calc input stderr: -./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' Starting parse Entering state 0 Reading a token @@ -230108,6 +228497,8 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -230115,101 +228506,101 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 +Entering state 11 Next token is token ')' (1.17: ) Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 Reading a token Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 Next token is token '\n' (1.18-2.0: ) Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) + $1 = nterm exp (1.1-17: 3333) $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 @@ -230223,112 +228614,122 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +./calc.at:1492: cat stderr +input: + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1494: $PREPARSER ./calc input +./torture.at:237: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS input: -621. existing.at:808: testing GNU Cim Grammar: Canonical LR(1) ... - | (#) + (#) = 2222 stderr: -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./calc.at:1489: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '!' (1.2: ) -Shifting token '!' (1.2: ) -Entering state 5 -Reading a token -Next token is token '!' (1.3: ) -Shifting token '!' (1.3: ) -Entering state 15 -Reducing stack 0 by rule 16 (line 107): - $1 = token '!' (1.2: ) - $2 = token '!' 
(1.3: ) -Shifting token error (1.2-3: ) -Entering state 11 -Reading a token -Next token is token ')' (1.4: ) -Shifting token ')' (1.4: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-3: ) - $3 = token ')' (1.4: ) --> $$ = nterm exp (1.1-4: 1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.6: ) -Shifting token '+' (1.6: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -Next token is token '(' (1.8: ) -Shifting token '(' (1.8: ) -Entering state 4 -Reading a token -Next token is token "number" (1.9: 1) -Shifting token "number" (1.9: 1) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 1) --> $$ = nterm exp (1.9: 1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token "number" (1.11: 2) -1.11: syntax error, unexpected number -Error: popping nterm exp (1.9: 1) -Shifting token error (1.9-11: ) -Entering state 11 -Next token is token "number" (1.11: 2) -Error: discarding token "number" (1.11: 2) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token ')' (1.12: ) -Entering state 11 -Next token is token ')' (1.12: ) -Shifting token ')' (1.12: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.8: ) - $2 = token error (1.9-11: ) - $3 = token ')' (1.12: ) --> $$ = nterm exp (1.8-12: 1111) -Entering state 29 +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token '=' (1.14: ) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-4: 1111) - $2 = token '+' (1.6: ) - $3 = nterm exp (1.8-12: 1111) --> $$ = nterm exp (1.1-12: 2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' (1.14: ) -Shifting token '=' (1.14: ) +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-12: 2222) - $2 = token '=' (1.14: ) - $3 = nterm exp (1.16: 1) -1.1-16: error: 2222 != 1 --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token 
'\n' (1.17-2.0: ) +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -230336,452 +228737,1673 @@ -> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) 
+-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) +Entering state 4 +Reading a token +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 +Reading a token +Next token is token "number" (5.3: 1) +Shifting token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 
103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' (7.6: ) +Entering state 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 
+Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: ) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 +Reading a token +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) +Entering state 4 +Reading a token +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 
2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 +Reading a token +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 +Reading a token +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) +Entering state 8 +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) +Entering state 18 +Reading a token +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) +Entering state 27 +Reading a token +Next token is token '\n' (10.16-11.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) +Entering state 8 +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 +Reading a token +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) +Entering state 8 +Reading a token +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 +Reading a token +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 +Reading a token +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 +Reading a token +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 +Reading a token +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) 
+ $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) +Entering state 8 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 +Reading a token +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 +Reading a token +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Reading a token +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Reading a token +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) 
+Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) + | 1 + 2 * 3 + !+ ++ +./calc.at:1492: $PREPARSER ./calc input +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing 
stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token '=' () +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '=' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token "number" (1.13: 7) +Shifting token "number" (1.13: 7) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) + $1 = token "number" (1.13: 7) +-> $$ = nterm exp (1.13: 7) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.14-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = nterm exp (2222) --> $$ = nterm exp (2222) + $1 = nterm exp (1.1-9: 7) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13: 7) +-> $$ = nterm exp (1.1-13: 7) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.14-2.0: ) +Shifting token '\n' (1.14-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp 
(2222) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-13: 7) + $2 = token '\n' (1.14-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr -stderr: -./calc.at:1492: cat stderr -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 +Next token is token "number" (2.1: 1) +Shifting token "number" (2.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.1: 1) +-> $$ = nterm exp (2.1: 1) +Entering state 8 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '+' (2.3: ) +Shifting token '+' (2.3: ) +Entering state 20 +Reading a token +Next token is token "number" (2.5: 2) +Shifting token "number" (2.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.5: 2) +-> $$ = nterm exp (2.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (2.7: ) +Shifting token '*' (2.7: ) +Entering state 21 +Reading a token +Next token is token '-' (2.9: ) +Shifting token '-' (2.9: ) +Entering state 2 +Reading a token +Next token is token "number" (2.10: 3) +Shifting token "number" (2.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.10: 3) +-> $$ = nterm exp (2.10: 3) +Entering state 10 +Reading a token +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.9: ) + $2 = nterm exp (2.10: 3) +-> $$ = nterm exp (2.9-10: -3) +Entering state 30 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (2.5: 2) + $2 = token '*' (2.7: ) + $3 = nterm exp (2.9-10: -3) +-> $$ = nterm exp (2.5-10: -6) +Entering state 29 +Next token is token '=' (2.12: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (2.1: 1) + $2 = token '+' (2.3: ) + $3 = nterm exp (2.5-10: -6) +-> $$ = nterm exp (2.1-10: -5) +Entering state 8 +Next token is token '=' (2.12: ) +Shifting token '=' (2.12: ) +Entering state 18 +Reading a token +Next token is token '-' (2.14: ) +Shifting token '-' (2.14: ) +Entering state 2 +Reading a token +Next token is token "number" (2.15: 5) +Shifting token "number" (2.15: 5) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (2.15: 5) +-> $$ = nterm exp (2.15: 5) +Entering state 10 +Reading a token +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (2.14: ) + $2 = nterm exp (2.15: 5) +-> $$ = nterm exp (2.14-15: -5) +Entering state 27 +Next token is token '\n' (2.16-3.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (2.1-10: -5) + $2 = token '=' (2.12: ) + $3 = nterm exp (2.14-15: -5) +-> $$ = nterm exp (2.1-15: -5) +Entering state 8 +Next token is token '\n' (2.16-3.0: ) +Shifting token '\n' (2.16-3.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + 
$1 = nterm exp (2.1-15: -5) + $2 = token '\n' (2.16-3.0: ) +-> $$ = nterm line (2.1-3.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-2.0: ) + $2 = nterm line (2.1-3.0: ) +-> $$ = nterm input (1.1-3.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (3.1-4.0: ) +Shifting token '\n' (3.1-4.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (3.1-4.0: ) +-> $$ = nterm line (3.1-4.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-3.0: ) + $2 = nterm line (3.1-4.0: ) +-> $$ = nterm input (1.1-4.0: ) +Entering state 6 +Reading a token +Next token is token '-' (4.1: ) +Shifting token '-' (4.1: ) +Entering state 2 +Reading a token +Next token is token "number" (4.2: 1) +Shifting token "number" (4.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.2: 1) +-> $$ = nterm exp (4.2: 1) +Entering state 10 +Reading a token +Next token is token '^' (4.3: ) +Shifting token '^' (4.3: ) +Entering state 23 +Reading a token +Next token is token "number" (4.4: 2) +Shifting token "number" (4.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.4: 2) +-> $$ = nterm exp (4.4: 2) +Entering state 32 +Reading a token +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (4.2: 1) + $2 = token '^' (4.3: ) + $3 = nterm exp (4.4: 2) +-> $$ = nterm exp (4.2-4: 1) +Entering state 10 +Next token is token '=' (4.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.1: ) + $2 = nterm exp (4.2-4: 1) +-> $$ = nterm exp (4.1-4: -1) +Entering state 8 +Next token is token '=' (4.6: ) +Shifting token '=' (4.6: ) +Entering state 18 +Reading a token +Next token is token '-' (4.8: ) +Shifting token '-' (4.8: ) +Entering state 2 +Reading a token +Next token is token "number" (4.9: 1) +Shifting token "number" (4.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (4.9: 1) +-> $$ = nterm exp (4.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (4.8: ) + $2 = nterm exp (4.9: 1) +-> $$ = nterm exp (4.8-9: -1) +Entering state 27 +Next token is token '\n' (4.10-5.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (4.1-4: -1) + $2 = token '=' (4.6: ) + $3 = nterm exp (4.8-9: -1) +-> $$ = nterm exp (4.1-9: -1) +Entering state 8 +Next token is token '\n' (4.10-5.0: ) +Shifting token '\n' (4.10-5.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (4.1-9: -1) + $2 = token '\n' (4.10-5.0: ) +-> $$ = nterm line (4.1-5.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-4.0: ) + $2 = nterm line (4.1-5.0: ) +-> $$ = nterm input (1.1-5.0: ) +Entering state 6 +Reading a token +Next token is token '(' (5.1: ) +Shifting token '(' (5.1: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token '-' (5.2: ) +Shifting token '-' (5.2: ) +Entering state 2 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) +Next token is token "number" (5.3: 1) +Shifting 
token "number" (5.3: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.3: 1) +-> $$ = nterm exp (5.3: 1) +Entering state 10 +Reading a token +Next token is token ')' (5.4: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (5.2: ) + $2 = nterm exp (5.3: 1) +-> $$ = nterm exp (5.2-3: -1) +Entering state 12 +Next token is token ')' (5.4: ) +Shifting token ')' (5.4: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (5.1: ) + $2 = nterm exp (5.2-3: -1) + $3 = token ')' (5.4: ) +-> $$ = nterm exp (5.1-4: -1) +Entering state 8 +Reading a token +Next token is token '^' (5.5: ) +Shifting token '^' (5.5: ) +Entering state 23 +Reading a token +Next token is token "number" (5.6: 2) +Shifting token "number" (5.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.6: 2) +-> $$ = nterm exp (5.6: 2) +Entering state 32 +Reading a token +Next token is token '=' (5.8: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (5.1-4: -1) + $2 = token '^' (5.5: ) + $3 = nterm exp (5.6: 2) +-> $$ = nterm exp (5.1-6: 1) +Entering state 8 +Next token is token '=' (5.8: ) +Shifting token '=' (5.8: ) +Entering state 18 +Reading a token +Next token is token "number" (5.10: 1) +Shifting token "number" (5.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (5.10: 1) +-> $$ = nterm exp (5.10: 1) +Entering state 27 +Reading a token +Next token is token '\n' (5.11-6.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (5.1-6: 1) + $2 = token '=' (5.8: ) + $3 = nterm exp (5.10: 1) +-> $$ = nterm exp (5.1-10: 1) +Entering state 8 +Next token is token '\n' (5.11-6.0: ) +Shifting token '\n' (5.11-6.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (5.1-10: 1) + $2 = token '\n' (5.11-6.0: ) +-> $$ = nterm line (5.1-6.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-5.0: ) + $2 = nterm line (5.1-6.0: ) +-> $$ = nterm input (1.1-6.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (6.1-7.0: ) +Shifting token '\n' (6.1-7.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (6.1-7.0: ) +-> $$ = nterm line (6.1-7.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-6.0: ) + $2 = nterm line (6.1-7.0: ) +-> $$ = nterm input (1.1-7.0: ) +Entering state 6 +Reading a token +Next token is token '-' (7.1: ) +Shifting token '-' (7.1: ) +Entering state 2 +Reading a token +Next token is token '-' (7.2: ) +Shifting token '-' (7.2: ) +Entering state 2 +Reading a token +Next token is token '-' (7.3: ) +Shifting token '-' (7.3: ) +Entering state 2 +Reading a token +Next token is token "number" (7.4: 1) +Shifting token "number" (7.4: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.4: 1) +-> $$ = nterm exp (7.4: 1) +Entering state 10 +Reading a token +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.3: ) + $2 = nterm exp (7.4: 1) +-> $$ = nterm exp (7.3-4: -1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.2: ) + $2 = nterm exp (7.3-4: -1) +-> $$ = nterm exp (7.2-4: 1) +Entering state 10 +Next token is token '=' (7.6: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.1: ) + $2 = nterm exp (7.2-4: 1) +-> $$ = nterm exp (7.1-4: -1) +Entering state 8 +Next token is token '=' (7.6: ) +Shifting token '=' 
(7.6: ) +Entering state 18 +Reading a token +Next token is token '-' (7.8: ) +Shifting token '-' (7.8: ) +Entering state 2 +Reading a token +Next token is token "number" (7.9: 1) +Shifting token "number" (7.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (7.9: 1) +-> $$ = nterm exp (7.9: 1) +Entering state 10 +Reading a token +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (7.8: ) + $2 = nterm exp (7.9: 1) +-> $$ = nterm exp (7.8-9: -1) +Entering state 27 +Next token is token '\n' (7.10-8.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (7.1-4: -1) + $2 = token '=' (7.6: ) + $3 = nterm exp (7.8-9: -1) +-> $$ = nterm exp (7.1-9: -1) +Entering state 8 +Next token is token '\n' (7.10-8.0: ) +Shifting token '\n' (7.10-8.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (7.1-9: -1) + $2 = token '\n' (7.10-8.0: ) +-> $$ = nterm line (7.1-8.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-7.0: ) + $2 = nterm line (7.1-8.0: ) +-> $$ = nterm input (1.1-8.0: ) +Entering state 6 +Reading a token +Next token is token '\n' (8.1-9.0: ) +Shifting token '\n' (8.1-9.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (8.1-9.0: ) +-> $$ = nterm line (8.1-9.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-8.0: ) + $2 = nterm line (8.1-9.0: ) +-> $$ = nterm input (1.1-9.0: ) +Entering state 6 +Reading a token +Next token is token "number" (9.1: 1) +Shifting token "number" (9.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.1: 1) +-> $$ = nterm exp (9.1: 1) +Entering state 8 +Reading a token +Next token is token '-' (9.3: ) +Shifting token '-' (9.3: ) +Entering state 19 +Reading a token +Next token is token "number" (9.5: 2) +Shifting token "number" (9.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.5: 2) +-> $$ = nterm exp (9.5: 2) +Entering state 28 +Reading a token +Next token is token '-' (9.7: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1: 1) + $2 = token '-' (9.3: ) + $3 = nterm exp (9.5: 2) +-> $$ = nterm exp (9.1-5: -1) +Entering state 8 +Next token is token '-' (9.7: ) +Shifting token '-' (9.7: ) +Entering state 19 +Reading a token +Next token is token "number" (9.9: 3) +Shifting token "number" (9.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.9: 3) +-> $$ = nterm exp (9.9: 3) +Entering state 28 +Reading a token +Next token is token '=' (9.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (9.1-5: -1) + $2 = token '-' (9.7: ) + $3 = nterm exp (9.9: 3) +-> $$ = nterm exp (9.1-9: -4) +Entering state 8 +Next token is token '=' (9.11: ) +Shifting token '=' (9.11: ) +Entering state 18 +Reading a token +Next token is token '-' (9.13: ) +Shifting token '-' (9.13: ) +Entering state 2 +Reading a token +Next token is token "number" (9.14: 4) +Shifting token "number" (9.14: 4) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (9.14: 4) +-> $$ = nterm exp (9.14: 4) +Entering state 10 +Reading a token +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 11 (line 102): + $1 = token '-' (9.13: ) + $2 = nterm exp (9.14: 4) +-> $$ = nterm exp (9.13-14: -4) +Entering state 27 +Next token is token '\n' (9.15-10.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (9.1-9: -4) + $2 = token '=' (9.11: 
) + $3 = nterm exp (9.13-14: -4) +-> $$ = nterm exp (9.1-14: -4) +Entering state 8 +Next token is token '\n' (9.15-10.0: ) +Shifting token '\n' (9.15-10.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (9.1-14: -4) + $2 = token '\n' (9.15-10.0: ) +-> $$ = nterm line (9.1-10.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-9.0: ) + $2 = nterm line (9.1-10.0: ) +-> $$ = nterm input (1.1-10.0: ) +Entering state 6 +Reading a token +Next token is token "number" (10.1: 1) +Shifting token "number" (10.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.1: 1) +-> $$ = nterm exp (10.1: 1) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 +Next token is token '-' (10.3: ) +Shifting token '-' (10.3: ) +Entering state 19 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (10.5: ) +Shifting token '(' (10.5: ) Entering state 4 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token "number" (10.6: 2) +Shifting token "number" (10.6: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.6: 2) +-> $$ = nterm exp (10.6: 2) +Entering state 12 Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 29 +Next token is token '-' (10.8: ) +Shifting token '-' (10.8: ) +Entering state 19 Reading a token -Next token is token '=' () -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1111) - $2 = token '+' () - $3 = nterm exp (1111) --> $$ = nterm exp (2222) +Next token is token "number" (10.10: 3) +Shifting token "number" (10.10: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (10.10: 3) +-> $$ = nterm exp (10.10: 3) +Entering state 28 +Reading a token +Next token is token ')' (10.11: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.6: 2) + $2 = token '-' (10.8: ) + $3 = nterm exp (10.10: 3) +-> $$ = nterm exp (10.6-10: -1) +Entering state 12 +Next token is token ')' (10.11: ) +Shifting token ')' (10.11: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (10.5: ) + $2 = nterm exp (10.6-10: -1) + $3 = token ')' (10.11: ) +-> $$ = nterm exp (10.5-11: -1) +Entering state 28 +Reading a token +Next token is token '=' (10.13: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (10.1: 1) + $2 = token '-' (10.3: ) + $3 = nterm exp (10.5-11: -1) +-> $$ = nterm exp (10.1-11: 2) Entering state 8 -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (10.13: ) +Shifting token '=' (10.13: ) Entering state 18 Reading a token -Next token is token number (2222) -Shifting token number (2222) +Next token is token "number" (10.15: 2) +Shifting token "number" (10.15: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (2222) --> $$ = nterm exp (2222) + $1 = token "number" (10.15: 2) +-> $$ = nterm exp (10.15: 2) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (10.16-11.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (2222) - $2 = token '=' () - $3 = 
nterm exp (2222) --> $$ = nterm exp (2222) + $1 = nterm exp (10.1-11: 2) + $2 = token '=' (10.13: ) + $3 = nterm exp (10.15: 2) +-> $$ = nterm exp (10.1-15: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (10.16-11.0: ) +Shifting token '\n' (10.16-11.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2222) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm exp (10.1-15: 2) + $2 = token '\n' (10.16-11.0: ) +-> $$ = nterm line (10.1-11.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-10.0: ) + $2 = nterm line (10.1-11.0: ) +-> $$ = nterm input (1.1-11.0: ) Entering state 6 Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -565. calc.at:1491: ok -stderr: -./calc.at:1494: cat stderr -input: -input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stdout: - | 1 + 2 * 3 + !+ ++ -./calc.at:1492: $PREPARSER ./calc input -./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (11.1-12.0: ) +Shifting token '\n' (11.1-12.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (11.1-12.0: ) +-> $$ = nterm line (11.1-12.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-11.0: ) + $2 = nterm line (11.1-12.0: ) +-> $$ = nterm input (1.1-12.0: ) +Entering state 6 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token "number" (12.1: 2) +Shifting token "number" (12.1: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) + $1 = token "number" (12.1: 2) +-> $$ = nterm exp (12.1: 2) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 +Next token is token '^' (12.2: ) +Shifting token '^' (12.2: ) +Entering state 23 Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) +Next token is token "number" (12.3: 2) +Shifting token "number" (12.3: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 + $1 = token "number" (12.3: 2) +-> $$ = nterm exp (12.3: 2) +Entering state 32 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '^' (12.4: ) +Shifting token '^' (12.4: ) +Entering state 23 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (12.5: 3) +Shifting token "number" (12.5: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 + $1 = token "number" (12.5: 3) +-> $$ = nterm exp (12.5: 3) +Entering state 32 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 
1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.3: 2) + $2 = token '^' (12.4: ) + $3 = nterm exp (12.5: 3) +-> $$ = nterm exp (12.3-5: 8) +Entering state 32 +Next token is token '=' (12.7: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (12.1: 2) + $2 = token '^' (12.2: ) + $3 = nterm exp (12.3-5: 8) +-> $$ = nterm exp (12.1-5: 256) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 +Next token is token '=' (12.7: ) +Shifting token '=' (12.7: ) +Entering state 18 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token "number" (12.9-11: 256) +Shifting token "number" (12.9-11: 256) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (12.9-11: 256) +-> $$ = nterm exp (12.9-11: 256) +Entering state 27 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (- *) + (1 2) = 1 -./calc.at:1494: $PREPARSER ./calc input -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -stderr: -Starting parse -Entering state 0 +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) Entering state 4 Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 +Next token is token "number" (13.2: 2) +Shifting token "number" (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) +Next token is token "number" (13.4: 2) +Shifting token "number" (13.4: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) + $1 = token "number" (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 +Next token is token "number" (13.7: 3) +Shifting token "number" (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) Entering state 18 Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (13.11-12: 64) +Shifting token "number" (13.11-12: 64) Entering state 1 
Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) + $1 = token "number" (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) +Next token is token '\n' (13.13-14.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token "end of input" (2.1: ) +Shifting token "end of input" (14.1: ) Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: cat stderr +Cleanup: popping token "end of input" (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | 1 2 +./calc.at:1494: $PREPARSER ./calc input stderr: -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS Starting parse Entering state 0 Reading a token @@ -230847,130 +230469,28 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | 1 + 2 * 3 + !- ++ -input: stderr: -./calc.at:1492: $PREPARSER ./calc input - | (1 + #) = 1111 Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '-' (1.2: ) -Shifting token '-' (1.2: ) -Entering state 2 -Reading a token -Next token is token '*' (1.4: ) -1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.4: ) -Entering state 9 -Reducing stack 0 by rule 15 (line 106): - $1 = token '-' (1.2: ) - $2 = token error (1.4: ) -Shifting token error (1.2-4: ) -Entering state 11 -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token "number" (1.10: 1) -Shifting token "number" (1.10: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.10: 1) --> $$ = nterm exp (1.10: 1) -Entering state 12 -Reading a token -Next token is token "number" (1.12: 2) -1.12: syntax error, unexpected number -Error: popping nterm exp (1.10: 1) -Shifting token error (1.10-12: ) -Entering state 11 -Next token is token "number" (1.12: 2) -Error: discarding token "number" (1.12: 2) -Reading a token -Next token is token ')' (1.13: ) -Entering state 11 -Next token is token ')' (1.13: ) -Shifting token ')' (1.13: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10-12: ) - $3 = token ')' (1.13: ) --> $$ = nterm exp (1.9-13: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.15: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-13: 1111) --> $$ = nterm exp (1.1-13: 2222) -Entering state 8 -Next token is token '=' (1.15: ) -Shifting token '=' (1.15: ) -Entering state 18 -Reading a token -Next token is token "number" (1.17: 1) -Shifting token "number" (1.17: 1) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.17: 1) --> $$ = nterm exp (1.17: 1) -Entering state 27 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-13: 2222) - $2 = token '=' (1.15: ) - $3 = nterm exp (1.17: 1) -1.1-17: error: 2222 != 1 --> $$ = nterm exp (1.1-17: 2222) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2222) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: $PREPARSER ./calc input +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) +input: +./torture.at:140: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1492: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -231037,86 +230557,23 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 Reading a token -Now at end of input. 
-Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token "number" (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token "number" (1.3: 2) ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -231128,85 +230585,9 @@ }eg ' expout || exit 77 stderr: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -622. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: LALR(1) ... 
+input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: Starting parse Entering state 0 Reading a token @@ -231272,8 +230653,9 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' ./calc.at:1494: cat stderr -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -231285,263 +230667,57 @@ }eg ' expout || exit 77 input: -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - | (* *) + (*) + (*) +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + | 1//2 ./calc.at:1494: $PREPARSER ./calc input stderr: ./calc.at:1492: cat stderr -./calc.at:1489: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | (#) + (#) = 2222 +./calc.at:1492: $PREPARSER ./calc input stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stdout: - | (#) + (#) = 2222 -./torture.at:238: $PREPARSER ./input -./calc.at:1492: $PREPARSER ./calc input -input: -stderr: - | (# + 1) = 1111 -stderr: +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) Starting parse Entering state 0 Reading a token @@ -231639,10 +230815,8 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: $PREPARSER ./calc input -./torture.at:238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -231656,79 +230830,6 @@ Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -605. 
torture.at:216: stderr: - ok -stderr: -Starting parse -Entering state 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 @@ -231823,78 +230924,8 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror ./calc.at:1494: cat stderr -stdout: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 4 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Shifting token error () -Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) -Reading a token -Next token is token ')' () -Entering state 11 -Next token is token ')' () -Shifting token ')' () -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 18 -Reading a token -Next token is token number (1111) -Shifting token number (1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) -Entering state 27 -Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) -Entering state 8 -Next token is token '\n' () -Shifting token '\n' () -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input () -Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -231905,96 +230936,28 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./existing.at:74: $PREPARSER ./input -stderr: -./calc.at:1492: cat stderr -syntax error, unexpected '*', expecting NEWLINE or '{' or ';' -./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: - - | 1 + 2 * 3 + !+ ++ + | error ./calc.at:1494: $PREPARSER ./calc input -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -input: stderr: +./calc.at:1492: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: | (1 + #) = 1111 -./calc.at:1489: cat stderr ./calc.at:1492: $PREPARSER ./calc input -616. existing.at:74: ok +stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token "invalid token" (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token "invalid token" (1.1: ) stderr: Starting parse Entering state 0 @@ -232073,99 +231036,111 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1489: $PREPARSER ./calc input ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -stderr: - -stderr: +./calc.at:1494: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token error () -Error: discarding token error () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' () +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1494: $PREPARSER ./calc input +stderr: +./calc.at:1492: cat stderr Starting parse Entering state 0 Reading a token @@ -232177,9 +231152,9 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 Reading a token Next token is token "number" (1.5: 2) Shifting token "number" (1.5: 2) @@ -232187,50 +231162,49 @@ Reducing stack 0 by rule 5 (line 79): $1 = token "number" (1.5: 2) -> $$ = nterm exp (1.5: 2) -Entering state 29 +Entering state 27 Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (# + 1) = 1111 +./calc.at:1492: $PREPARSER ./calc input +stderr: +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) Starting parse Entering state 0 Reading a token @@ -232238,26 +231212,18 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Error: discarding token '+' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token Next token is token ')' (1.7: ) Entering state 11 @@ -232308,104 +231274,89 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 12 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 20 -Reading a token -syntax error: invalid character: '#' -Next token is token error () -Error: popping token '+' () -Error: popping nterm exp (1) -Shifting token error () +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) Entering state 11 -Next token is token error () -Error: discarding token error () +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) Reading a token -Next token is token number (1) -Error: discarding token number (1) +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) Reading a token -Next token is token ')' () +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' () - $2 = token error () - $3 = token ')' () --> $$ = nterm exp (1111) + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1111) -Shifting token number (1111) +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1111) --> $$ = nterm exp (1111) + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1111) - $2 = token '=' () - $3 = nterm exp (1111) --> $$ = nterm exp (1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1111) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. 
-Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () - | 1 + 2 * 3 + !- ++ -./calc.at:1494: $PREPARSER ./calc input -stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] @@ -232438,80 +231389,10 @@ input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token "number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1492: cat stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -input: -./torture.at:141: $PREPARSER ./input -./calc.at:1489: "$PERL" -pi -e 'use strict; +./calc.at:1494: cat stderr +./existing.at:74: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -232521,78 +231402,154 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 +input: +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 + | + | +1 +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1492: cat stderr stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: +stderr: + | (1 + # + 1) = 1111 ./calc.at:1492: $PREPARSER ./calc input -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) stderr: Starting parse Entering state 0 Reading a token -Next token is token "number" (1.1: 1) -Shifting token "number" (1.1: 1) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token 
"number" (1.5: 2) -Shifting token "number" (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -Next token is token "number" (1.9: 3) -Shifting token "number" (1.9: 3) +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: -./torture.at:141: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -232600,56 +231557,70 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = 
nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -232662,15 +231633,8 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: cat stderr -623. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: IELR(1) ... -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -604. torture.at:132: ok -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -624. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: Canonical LR(1) ... -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1494: cat stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -232680,24 +231644,27 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: cat stderr +./calc.at:1494: $PREPARSER ./calc /dev/null +./calc.at:1492: cat stderr stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) input: -stdout: | (1 + 1) / (1 - 1) -./calc.at:1489: $PREPARSER ./calc input +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token "end of input" (1.1: ) stderr: -./calc.at:1492: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. 
- || /\t/ - )' calc.cc calc.hh - Starting parse Entering state 0 Reading a token @@ -232705,56 +231672,102 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 Reading a token Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 Reading a token Next token is token ')' (1.7: ) -Entering state 11 +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): +Entering state 26 +Reducing stack 0 by rule 13 (line 104): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) + $2 = nterm exp (1.2-6: 2) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (1.15-2.0: ) 
-Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -232767,131 +231780,143 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: - | (#) + (#) = 2222 -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1494: cat stderr Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) Entering state 29 Reading a token -Next token is token ')' () +Next token is token ')' (1.7: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '/' () -Shifting token '/' () +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) Entering state 22 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) Entering state 19 Reading a token -Next token is token number (1) -Shifting token number (1) +Next 
token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering state 28 Reading a token -Next token is token ')' () +Next token is token ')' (1.17: ) Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) Entering state 12 -Next token is token ')' () -Shifting token ')' () +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) Entering state 31 Reading a token -Next token is token '\n' () +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () -./calc.at:1489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: - +stdout: +stderr: +./torture.at:238: $PREPARSER ./input stdout: ./existing.at:74: $PREPARSER ./input stderr: +input: +stderr: ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -232902,8 +231927,46 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: +./torture.at:238: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected '*', expecting NEWLINE or '{' or ';' ./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1494: $PREPARSER ./calc input +stderr: +605. 
torture.at:216: ./calc.at:1492: cat stderr + ok +input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] +input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] +input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] +input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] +input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] +input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] stderr: Starting parse Entering state 0 @@ -232912,84 +231975,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 +Reading a token +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 Reading a token -Next token is token ')' (1.9: ) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) +Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 +Reading a token +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 +Reading a token +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '+' (1.30: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) +Entering state 4 +Reading a token +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) +Entering state 12 +Reading a token +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 +Reading a token +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 +Reading a token +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) +Entering state 12 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 +Reading a token +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) +Entering state 8 +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) Entering state 18 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) Entering state 27 Reading a token -Next token is token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -233002,139 +232215,281 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: +567. calc.at:1492: ok +616. existing.at:74: ok +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 + +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token "number" (1.7: 1) +Shifting token "number" (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) Entering state 20 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.11: 1) +Shifting token "number" (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.11: 1) +-> $$ = nterm exp (1.11: 1) Entering state 29 Reading a token -Next token is token ')' () +Next token is token '+' (1.13: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1) - $2 = token '+' () - $3 = nterm exp (1) --> $$ = nterm exp (2) + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (2) - $3 = token ')' () --> $$ = nterm exp (2) +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token "number" (1.15: 1) +Shifting token "number" (1.15: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 +Reading a token +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) +Entering state 12 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 +Reading a token +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 Reading a token -Next token is token '/' () -Shifting token '/' () -Entering state 22 +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) +Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) +Reading a token +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) +Reading a token +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.30: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) +Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 +Reading a token +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.33: 1) +Shifting token "number" (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) + $1 = token "number" (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '-' () -Shifting token '-' () -Entering state 19 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 Reading a token -Next token is token number (1) -Shifting token number (1) +Next token is token "number" (1.37: 2) +Shifting token "number" (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1) --> $$ = nterm exp (1) -Entering state 28 + $1 = token "number" (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 Reading a token -Next token is token ')' () -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1) - $2 = token '-' () - $3 = nterm exp (1) --> $$ = nterm exp (0) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token ')' () -Shifting token ')' () -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' () - $2 = nterm exp (0) - $3 = token ')' () --> $$ = nterm exp (0) -Entering state 31 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 Reading a token -Next token is token '\n' () -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (2) - $2 = token '/' () - $3 = nterm exp (0) -error: null divisor --> $$ = nterm exp (2) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '\n' () -Shifting token '\n' () +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Reading a token +Next token is token "number" (1.46: 1) +Shifting token "number" (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (2) - $2 = token '\n' () --> $$ = nterm line () + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) +-> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): - $1 = nterm line () --> $$ = nterm input () + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input () +Shifting token "end of input" (2.1: ) Entering state 16 -Cleanup: popping token end of input () -Cleanup: popping nterm input () - | 1 + 2 * 3 = 7 - | 1 + 2 * -3 = -5 - | - | -1^2 = -1 - | (-1)^2 = 1 - | - | ---1 = -1 - | - | 1 - 2 - 3 = -4 - | 1 - (2 - 3) = 2 - | - | 2^2^3 = 256 - | (2^2)^3 = 64 -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1492: cat stderr -617. existing.at:74: ok +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) + +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error + +stderr: +./calc.at:1494: cat stderr +input-lalr.y: warning: 65 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +./existing.at:74: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +input: + | (!!) 
+ (1 2) = 1 +./calc.at:1494: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -233143,76 +232498,90 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 Reading a token -Next token is token ')' (1.3: ) +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) Entering state 8 Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) Entering state 20 Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) Entering state 4 Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) Reading a token -Next token is token ')' (1.9: ) +Next token is token ')' (1.12: ) Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) Entering state 29 Reading a token -Next token is token '=' (1.11: ) +Next token is token '=' (1.14: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) Entering state 18 Reading a token -Next token is token "number" (1.13-16: 2222) -Shifting token "number" (1.13-16: 2222) +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) Entering 
state 27 Reading a token Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 -> $$ = nterm exp (1.1-16: 2222) Entering state 8 Next token is token '\n' (1.17-2.0: ) @@ -233233,42 +232602,169 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '!' (1.2: ) +Shifting token '!' (1.2: ) +Entering state 5 +Reading a token +Next token is token '!' (1.3: ) +Shifting token '!' (1.3: ) +Entering state 15 +Reducing stack 0 by rule 16 (line 107): + $1 = token '!' (1.2: ) + $2 = token '!' (1.3: ) +Shifting token error (1.2-3: ) +Entering state 11 +Reading a token +Next token is token ')' (1.4: ) +Shifting token ')' (1.4: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-3: ) + $3 = token ')' (1.4: ) +-> $$ = nterm exp (1.1-4: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.6: ) +Shifting token '+' (1.6: ) +Entering state 20 +Reading a token +Next token is token '(' (1.8: ) +Shifting token '(' (1.8: ) +Entering state 4 +Reading a token +Next token is token "number" (1.9: 1) +Shifting token "number" (1.9: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 1) +-> $$ = nterm exp (1.9: 1) +Entering state 12 +Reading a token +Next token is token "number" (1.11: 2) +1.11: syntax error, unexpected number +Error: popping nterm exp (1.9: 1) +Shifting token error (1.9-11: ) +Entering state 11 +Next token is token "number" (1.11: 2) +Error: discarding token "number" (1.11: 2) +Reading a token +Next token is token ')' (1.12: ) +Entering state 11 +Next token is token ')' (1.12: ) +Shifting token ')' (1.12: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.8: ) + $2 = token error (1.9-11: ) + $3 = token ')' (1.12: ) +-> $$ = nterm exp (1.8-12: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.14: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-4: 1111) + $2 = token '+' (1.6: ) + $3 = nterm exp (1.8-12: 1111) +-> $$ = nterm exp (1.1-12: 2222) +Entering state 8 +Next token is token '=' (1.14: ) +Shifting token '=' (1.14: ) +Entering state 18 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-12: 2222) + $2 = token '=' (1.14: ) + $3 = nterm exp (1.16: 1) +1.1-16: error: 2222 != 1 +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = 
nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +stdout: +./calc.at:1492: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. + || /\t/ + )' calc.cc calc.hh + +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +625. regression.at:25: testing Trivial grammars ... input: -input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 78 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: error: 10 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] -input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] -input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] -input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] -input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] -input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. 
Rerun with option '--update'. [-Werror=other] +./regression.at:43: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + | 1 + 2 * 3 = 7 + | 1 + 2 * -3 = -5 + | + | -1^2 = -1 + | (-1)^2 = 1 + | + | ---1 = -1 + | + | 1 - 2 - 3 = -4 + | 1 - (2 - 3) = 2 + | + | 2^2^3 = 256 + | (2^2)^3 = 64 +./calc.at:1492: $PREPARSER ./calc input +./calc.at:1494: cat stderr +623. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: IELR(1) ... +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +624. existing.at:1460: testing GNU pic (Groff 1.18.1) Grammar: Canonical LR(1) ... +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y +input: +stderr: + | (- *) + (1 2) = 1 +./calc.at:1494: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -234105,31 +233601,8 @@ Entering state 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) - | (1 + # + 1) = 1111 -./calc.at:1492: $PREPARSER ./calc input -./calc.at:1489: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -234137,70 +233610,103 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) +Entering state 11 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 Reading a token Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Shifting token "number" (1.10: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) +Entering state 12 Reading a token -Next token is token ')' (1.11: ) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) +Reading a token +Next token is token ')' (1.13: ) +Entering state 11 +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) +1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 
4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -234213,9 +233719,8 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1489: cat stderr -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -235052,11 +234557,11 @@ Entering state 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1494: cat stderr -625. regression.at:25: testing Trivial grammars ... -./regression.at:43: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: + | 1 2 +./calc.at:1492: $PREPARSER ./calc input +stderr: +./regression.at:44: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c stderr: Starting parse Entering state 0 @@ -235065,171 +234570,103 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '-' (1.2: ) +Shifting token '-' (1.2: ) +Entering state 2 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '*' (1.4: ) +1.4: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.4: ) +Entering state 9 +Reducing stack 0 by rule 15 (line 106): + $1 = token '-' (1.2: ) + $2 = token error (1.4: ) +Shifting token error (1.2-4: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) - | 1 2 -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -564. 
calc.at:1489: ok -./calc.at:1492: $PREPARSER ./calc input -input: - | (1 + #) = 1111 -./calc.at:1494: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -stderr: -Starting parse -Entering state 0 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token "number" (1.10: 1) +Shifting token "number" (1.10: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) + $1 = token "number" (1.10: 1) +-> $$ = nterm exp (1.10: 1) Entering state 12 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token "number" (1.12: 2) +1.12: syntax error, unexpected number +Error: popping nterm exp (1.10: 1) +Shifting token error (1.10-12: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token "number" (1.12: 2) +Error: discarding token "number" (1.12: 2) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.13: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.13: ) +Shifting token ')' (1.13: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 + $1 = token '(' (1.9: ) + $2 = token error (1.10-12: ) + $3 = token ')' (1.13: ) +-> $$ = nterm exp (1.9-13: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.15: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-13: 1111) +-> $$ = nterm exp (1.1-13: 2222) +Entering state 8 +Next token is token '=' (1.15: ) +Shifting token '=' (1.15: ) Entering state 18 Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) +Next token is token "number" (1.17: 1) +Shifting token "number" (1.17: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token "number" (1.17: 1) +-> $$ = nterm exp (1.17: 1) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-13: 2222) + $2 = token '=' (1.15: ) + $3 = nterm exp (1.17: 1) 
+1.1-17: error: 2222 != 1 +-> $$ = nterm exp (1.1-17: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-17: 2222) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -235242,23 +234679,6 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./calc.at:1492: cat stderr -stderr: - -./regression.at:44: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c Starting parse Entering state 0 Reading a token @@ -235274,88 +234694,7 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token number (1.3: 2) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: -626. regression.at:55: testing YYSTYPE typedef ... -./regression.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - | (1 + 1) / (1 - 1) -./calc.at:1492: $PREPARSER ./calc input +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -235366,255 +234705,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm 
exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: cat stderr -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1494: cat stderr stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 -Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) Entering state 8 -Next token is 
token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -input: - | 1//2 -./calc.at:1492: $PREPARSER ./calc input +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) input: + | (* *) + (*) + (*) +./calc.at:1494: $PREPARSER ./calc input ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -235625,127 +234735,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 - | (# + 1) = 1111 -./calc.at:1494: $PREPARSER ./calc input -stderr: -./calc.at:1492: cat stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) -Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = 
nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:74: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c -567. calc.at:1492: ok -627. regression.at:85: testing Early token definitions with --yacc ... -stderr: -./regression.at:115: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc -o input.c input.y -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) stderr: Starting parse Entering state 0 @@ -235754,128 +234743,42 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' Shifting token error (1.2: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token "number" (1.6: 1) -Error: discarding token "number" (1.6: 1) +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token "number" (1.11-14: 1111) -Shifting token "number" (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. 
-Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -./calc.at:1494: cat stderr -input: - | (1 + # + 1) = 1111 -./calc.at:1494: $PREPARSER ./calc input -stderr: -Starting parse -Entering state 0 +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token Next token is token ')' (1.11: ) Entering state 11 @@ -235883,37 +234786,59 @@ Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -235926,53 +234851,53 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | error -./calc.at:1492: $PREPARSER ./calc input -./regression.at:116: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./calc.at:1492: cat stderr ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: stderr: Starting parse Entering state 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -stderr: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token Next token is token '(' (1.1: ) Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token ')' (1.5: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 Reading a token -Next token is token "number" (1.10: 1) -Error: discarding token "number" (1.10: 1) +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) Reading a token Next token is token ')' (1.11: ) Entering state 11 @@ -235980,37 +234905,59 @@ Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 Reading a token -Next token is token "number" (1.15-18: 1111) -Shifting token "number" (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -236023,14 +234970,41 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) + | 1//2 +./calc.at:1492: $PREPARSER ./calc input stderr: Starting parse Entering state 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] @@ -236119,19 +235093,32 @@ input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 ./calc.at:1494: cat stderr -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1494: $PREPARSER ./calc input ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -236142,145 +235129,330 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -628. regression.at:127: testing Early token definitions without --yacc ... -stdout: -./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 -./regression.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -input: -./calc.at:1492: cat stderr - | (1 + 1) / (1 - 1) -./calc.at:1494: $PREPARSER ./calc input -626. regression.at:55: ok stderr: +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm 
exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) Entering state 20 Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1492: cat stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) Entering state 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +input: +input: + | error +./calc.at:1492: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) + | 1 + 2 * 3 + !- ++ +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 Reading a token Next token is token '-' (1.14: ) Shifting token '-' (1.14: ) -Entering state 19 +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +Starting parse +Entering state 0 +Reading a token +Next token is token "number" (1.1: 1) +Shifting token "number" (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 + $1 = token "number" (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token "number" (1.5: 2) +Shifting token "number" (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token "number" (1.9: 3) +Shifting token "number" (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 Reading a token -Now at end of input. -Shifting token "end of input" (2.1: ) -Entering state 16 -Cleanup: popping token "end of input" (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' 
(1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./calc.at:1492: cat stderr +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +stdout: input: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:162: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./existing.at:74: $PREPARSER ./input +./calc.at:1494: cat stderr stderr: | 1 = 2 = 3 ./calc.at:1492: $PREPARSER ./calc input -stdout: - -stderr: -stderr: +./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror stderr: -stdout: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +input: Starting parse Entering state 0 Reading a token @@ -236310,6 +235482,12 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) +617. existing.at:74: ok + | (#) + (#) = 2222 +./calc.at:1494: $PREPARSER ./calc input +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -236317,102 +235495,84 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token "number" (1.2: 1) -Shifting token "number" (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -Next token is token "number" (1.6: 1) -Shifting token "number" (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 +Next token is token '+' 
(1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Reading a token -Next token is token "number" (1.12: 1) -Shifting token "number" (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 Reading a token -Next token is token "number" (1.16: 1) -Shifting token "number" (1.16: 1) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token "number" (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -236425,21 +235585,10 @@ Entering state 16 Cleanup: popping token "end of input" (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:45: $CC $CFLAGS $CPPFLAGS -c -o input.o -DYYDEBUG -c input.c -627. 
regression.at:85: ok -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stdout: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: - Starting parse Entering state 0 Reading a token @@ -236469,8 +235618,105 @@ Error: popping token '=' (1.3: ) Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1494: cat stderr -569. calc.at:1494: ok +./regression.at:45: $CC $CFLAGS $CPPFLAGS -c -o input.o -DYYDEBUG -c input.c + +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 +Reading a token +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) +Entering state 11 +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) +Reading a token +Next token is token ')' (1.9: ) +Entering state 11 +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) +Entering state 18 +Reading a token +Next token is token "number" (1.13-16: 2222) +Shifting token "number" (1.13-16: 2222) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) +Entering state 27 +Reading a token +Next token is token '\n' (1.17-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) +Entering state 8 +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -236481,20 +235727,24 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -629. regression.at:173: testing Braces parsing ... -./regression.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1492: cat stderr +./calc.at:1494: cat stderr input: - -./regression.at:187: grep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c | | +1 ./calc.at:1492: $PREPARSER ./calc input -stdout: - { tests = {{{{{{{{{{}}}}}}}}}}; } stderr: -630. regression.at:196: testing Rule Line Numbers ... +input: Starting parse Entering state 0 Reading a token @@ -236514,13 +235764,91 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) -629. regression.at:173: ok -./regression.at:232: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -v input.y + | (1 + #) = 1111 +./calc.at:1494: $PREPARSER ./calc input ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 
= nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token Next token is token '\n' (1.1-2.0: ) Shifting token '\n' (1.1-2.0: ) Entering state 3 @@ -236537,8 +235865,11 @@ 2.1: syntax error, unexpected '+' Error: popping nterm input (1.1-2.0: ) Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: - +stdout: +stderr: +./torture.at:141: $PREPARSER ./input ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -236549,31 +235880,117 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -631. regression.at:345: testing Mixing %token styles ... -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -Wall -o input.c input.y +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./torture.at:141: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1492: cat stderr -628. regression.at:127: ok ./calc.at:1492: $PREPARSER ./calc /dev/null -./regression.at:235: cat input.output +626. regression.at:55: testing YYSTYPE typedef ... +./regression.at:73: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: +604. torture.at:132: ok Starting parse Entering state 0 Reading a token Now at end of input. 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token end of input (1.1: ) +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -630. regression.at:196: ok stderr: - Starting parse Entering state 0 Reading a token Now at end of input. 1.1: syntax error, unexpected end of input Cleanup: discarding lookahead token end of input (1.1: ) -632. regression.at:437: testing Token definitions: parse.error=detailed ... +./calc.at:1494: cat stderr +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -236584,45 +236001,86 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Werror +input: + | (# + 1) = 1111 +./calc.at:1494: $PREPARSER ./calc input ./calc.at:1492: cat stderr -input: stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering 
state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] -input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 265 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] -input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] -input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] -input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] -input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] -input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] -input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] -input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] -input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] -input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] -input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] -input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:51.1-5: error: useless precedence and associativity for ')' 
[-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] ./calc.at:1492: $PREPARSER ./calc input +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:74: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c stderr: Starting parse Entering state 0 @@ -236871,9 +236329,89 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -633. regression.at:438: testing Token definitions: parse.error=verbose ... +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token "number" (1.6: 1) +Error: discarding token "number" (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token "number" (1.11-14: 1111) +Shifting token "number" (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none stderr: +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -237121,7 +236659,10 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y +./calc.at:1494: cat stderr +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +627. regression.at:85: testing Early token definitions with --yacc ... +./regression.at:115: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --yacc -o input.c input.y ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -237132,13 +236673,99 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./existing.at:74: sed 's,.*/$,,' stderr 1>&2 ./calc.at:1492: cat stderr input: + | (1 + # + 1) = 1111 +./calc.at:1494: $PREPARSER ./calc input +stderr: +input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ 
= nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) | (!!) + (1 2) = 1 ./calc.at:1492: $PREPARSER ./calc input +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror Starting parse Entering state 0 Reading a token @@ -237250,9 +236877,105 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token "number" (1.10: 1) +Error: discarding token "number" (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token "number" (1.15-18: 1111) +Shifting token "number" (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./regression.at:116: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c stderr: +stderr: +stdout: +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -237364,96 +237087,8 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] -input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] -input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] -input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] 
-input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] -input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] -input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] -input.y:176.1-5: error: useless associativity for BETWEEN, use %precedence [-Werror=precedence] -input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] -input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for UPPER, 
use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] -input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] -input.y:168.1-5: error: useless associativity for START, use %precedence [-Werror=precedence] -input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] -input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] -input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] -input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] -input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] -input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] -input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] -input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] -input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] -input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -stderr: +626. 
regression.at:55: ok +./calc.at:1494: cat stderr ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -237464,22 +237099,134 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -input.y:3.1-5: error: useless precedence and associativity for "||" [-Werror=precedence] -input.y:3.1-5: error: useless precedence and associativity for "<=" [-Werror=precedence] -./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +input: ./calc.at:1492: cat stderr -./regression.at:357: sed 's,.*/$,,' stderr 1>&2 -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -634. regression.at:447: testing Characters Escapes ... -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror -./regression.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y + + | (1 + 1) / (1 - 1) +./calc.at:1494: $PREPARSER ./calc input +stderr: +stderr: input: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp 
(1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) | (- *) + (1 2) = 1 -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=error ./calc.at:1492: $PREPARSER ./calc input +stdout: stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -237596,18 +237343,124 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: +625. regression.at:25: ok ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:25.8-14: note: previous declaration - 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" 
- | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -./regression.at:437: sed 's,.*/$,,' stderr 1>&2 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token "number" (1.2: 1) +Shifting token "number" (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token "number" (1.6: 1) +Shifting token "number" (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token "number" (1.12: 1) +Shifting token "number" (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token "number" (1.16: 1) +Shifting token "number" (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token "number" (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token "end of input" (2.1: ) +Entering state 16 +Cleanup: popping token "end of input" (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stderr: Starting parse Entering state 0 @@ -237725,19 +237578,274 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: cat stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +569. calc.at:1494: ok +./calc.at:1492: cat stderr +input: + | (* *) + (*) + (*) +./calc.at:1492: $PREPARSER ./calc input +628. regression.at:127: testing Early token definitions without --yacc ... + +stderr: +./regression.at:161: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) stdout: -./calc.at:1491: "$PERL" -ne ' - chomp; - print "$ARGV:$.: {$_}\n" - if (# No starting/ending empty lines. - (eof || $. == 1) && /^\s*$/ - # No trailing space. - || /\s$/ - # No tabs. - || /\t/ - )' calc.cc calc.hh - +627. regression.at:85: ok +629. regression.at:173: testing Braces parsing ... +./regression.at:185: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token '*' (1.2: ) +1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.2: ) +Entering state 11 +Next token is token '*' (1.2: ) +Error: discarding token '*' (1.2: ) +Reading a token +Next token is token '*' (1.4: ) +Error: discarding token '*' (1.4: ) +Reading a token +Next token is token ')' (1.5: ) +Entering state 11 +Next token is token ')' (1.5: ) +Shifting token ')' (1.5: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-4: ) + $3 = token ')' (1.5: ) +-> $$ = nterm exp (1.1-5: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.7: ) +Shifting token '+' (1.7: ) +Entering state 20 +Reading a token +Next token is token '(' (1.9: ) +Shifting token '(' (1.9: ) +Entering state 4 +Reading a token +Next token is token '*' (1.10: ) +1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.10: ) +Entering state 11 +Next token is token '*' (1.10: ) +Error: discarding token '*' (1.10: ) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.9: ) + $2 = token error (1.10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.9-11: 1111) +Entering state 29 +Reading a token +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-5: 1111) + $2 = token '+' (1.7: ) + $3 = nterm exp (1.9-11: 1111) +-> $$ = nterm exp (1.1-11: 2222) +Entering state 8 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 +Reading a token +Next token is token '(' (1.15: ) +Shifting token '(' (1.15: ) +Entering state 4 +Reading a token +Next token is token '*' (1.16: ) +1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Shifting token error (1.16: ) +Entering state 11 +Next token is token '*' (1.16: ) +Error: discarding token '*' (1.16: ) +Reading a token +Next token is token ')' (1.17: ) +Entering state 11 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.15: ) + $2 = token error (1.16: ) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.15-17: 1111) +Entering state 29 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-11: 2222) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15-17: 1111) +-> $$ = nterm exp (1.1-17: 3333) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 3333) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -237748,7 +237856,99 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Wnone,none -Werror --trace=none + +./calc.at:1492: cat stderr +630. regression.at:196: testing Rule Line Numbers ... +./regression.at:232: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -v input.y +stderr: +input: +./regression.at:162: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c + | 1 + 2 * 3 + !+ ++ +./calc.at:1492: $PREPARSER ./calc input +stdout: +./calc.at:1494: "$PERL" -ne ' + chomp; + print "$ARGV:$.: {$_}\n" + if (# No starting/ending empty lines. + (eof || $. == 1) && /^\s*$/ + # No trailing space. + || /\s$/ + # No tabs. 
+ || /\t/ + )' calc.cc calc.hh + +./regression.at:187: grep 'tests = {{{{{{{{{{}}}}}}}}}};' input.c +stderr: +stdout: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' (1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + { tests = {{{{{{{{{{}}}}}}}}}}; } +629. 
regression.at:173: ok input: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 @@ -237763,11 +237963,168 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1491: $PREPARSER ./calc input -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error -./calc.at:1492: cat stderr -./regression.at:466: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c +./calc.at:1494: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): + $1 = token '!' 
(1.13: ) + $2 = token '+' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +./regression.at:235: cat input.output +stderr: +input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.13: error: empty rule without %empty [-Werror=empty-rule] +input.y:309.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:382.14: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.11-48: error: rule useless in parser due to conflicts [-Werror=other] +input.y:154.1-5: error: useless associativity for LABEL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for VARIABLE, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for NUMBER, use %precedence [-Werror=precedence] +input.y:141.1-5: error: useless associativity for TEXT, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for ORDINAL, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LAST, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for UP, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOWN, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for BOX, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for CIRCLE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ELLIPSE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARC, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for LINE, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for ARROW, use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for SPLINE, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for HEIGHT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for RADIUS, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for WIDTH, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for DIAMETER, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for FROM, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for TO, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for AT, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for SOLID [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DOTTED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for DASHED, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for CHOP, use %precedence [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for LJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for RJUST [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for ABOVE [-Werror=precedence] +input.y:147.1-5: error: useless precedence and associativity for BELOW [-Werror=precedence] +input.y:176.1-5: error: useless associativity for OF, use %precedence [-Werror=precedence] +input.y:176.1-5: error: 
useless associativity for BETWEEN, use %precedence [-Werror=precedence] +input.y:177.1-5: error: useless associativity for AND, use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for HERE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_N, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_E, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_W, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_S, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SE, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_NW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_SW, use %precedence [-Werror=precedence] +input.y:166.1-5: error: useless associativity for DOT_C, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_START, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for DOT_END, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for COS, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for ATAN2, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for LOG, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for EXP, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SQRT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MAX, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for K_MIN, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for INT, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for RAND, use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for SRAND, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for TOP, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for BOTTOM, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for UPPER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for LOWER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for LEFT_CORNER, use %precedence [-Werror=precedence] +input.y:167.1-5: error: useless associativity for RIGHT_CORNER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for NORTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for SOUTH, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for EAST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for WEST, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for CENTER, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for END, use %precedence [-Werror=precedence] +input.y:168.1-5: error: useless associativity for START, 
use %precedence [-Werror=precedence] +input.y:140.1-5: error: useless associativity for PLOT, use %precedence [-Werror=precedence] +input.y:162.1-5: error: useless associativity for THICKNESS, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless associativity for FILL, use %precedence [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for COLORED [-Werror=precedence] +input.y:153.1-5: error: useless precedence and associativity for OUTLINED [-Werror=precedence] +input.y:141.1-5: error: useless associativity for SPRINTF, use %precedence [-Werror=precedence] +input.y:137.1-5: error: useless associativity for '.', use %precedence [-Werror=precedence] +input.y:156.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:157.1-5: error: useless associativity for '`', use %precedence [-Werror=precedence] +input.y:159.1-5: error: useless associativity for '[', use %precedence [-Werror=precedence] +input.y:170.1-5: error: useless associativity for ',', use %precedence [-Werror=precedence] +input.y:181.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +input: +631. regression.at:345: testing Mixing %token styles ... stderr: + | 1 + 2 * 3 + !- ++ +./calc.at:1492: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -238604,9 +238961,79 @@ Entering state 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +630. regression.at:196: ok +./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -Wall -o input.c input.y +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: -input: +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) +Entering state 20 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 +Reading a token +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 +Reading a token +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 +Reading a token +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = 
nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) +Entering state 8 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 +Reading a token +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) stderr: Starting parse Entering state 0 @@ -239444,278 +239871,13 @@ Entering state 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) - | (* *) + (*) + (*) -./calc.at:1492: $PREPARSER ./calc input -input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:25.8-14: note: previous declaration - 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~ -input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] - 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -stderr: +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error + | 1 2 -./calc.at:1491: $PREPARSER ./calc input -stderr: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:438: sed 's,.*/$,,' stderr 1>&2 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=none -Werror --trace=none -stderr: -stderr: -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token '*' (1.2: ) -1.2: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.2: ) -Entering state 11 -Next token is token '*' (1.2: ) -Error: discarding token '*' (1.2: ) -Reading a token -Next token is token '*' (1.4: ) -Error: discarding token '*' (1.4: ) -Reading a token -Next token is token ')' (1.5: ) -Entering state 11 -Next token is token ')' (1.5: ) -Shifting token ')' (1.5: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-4: ) - $3 = token ')' (1.5: ) --> $$ = nterm exp (1.1-5: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.7: ) -Shifting token '+' (1.7: ) -Entering state 20 -Reading a token -Next token is token '(' (1.9: ) -Shifting token '(' (1.9: ) -Entering state 4 -Reading a token -Next token is token '*' (1.10: ) -1.10: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.10: ) -Entering state 11 -Next token is token '*' (1.10: ) -Error: discarding token '*' (1.10: ) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.9: ) - $2 = token error (1.10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.9-11: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-5: 1111) - $2 = token '+' (1.7: ) - $3 = nterm exp (1.9-11: 1111) --> $$ = nterm exp (1.1-11: 2222) -Entering state 8 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token '(' (1.15: ) -Shifting token '(' (1.15: ) -Entering state 4 -Reading a token -Next token is token '*' (1.16: ) -1.16: syntax error, unexpected '*', expecting number or '-' or '(' or '!' -Shifting token error (1.16: ) -Entering state 11 -Next token is token '*' (1.16: ) -Error: discarding token '*' (1.16: ) -Reading a token -Next token is token ')' (1.17: ) -Entering state 11 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.15: ) - $2 = token error (1.16: ) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.15-17: 1111) -Entering state 29 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-11: 2222) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15-17: 1111) --> $$ = nterm exp (1.1-17: 3333) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 3333) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stdout: -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +./calc.at:1494: $PREPARSER ./calc input stderr: -625. 
regression.at:25: ok Starting parse Entering state 0 Reading a token @@ -239731,134 +239893,8 @@ 1.3: syntax error, unexpected number Error: popping nterm exp (1.1: 1) Cleanup: discarding lookahead token number (1.3: 2) -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: cat stderr -631. regression.at:345: ok - -stderr: -./calc.at:1491: cat stderr -stdout: -input: - | 1 + 2 * 3 + !+ ++ -./calc.at:1492: $PREPARSER ./calc input -634. regression.at:447: ok -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' 
(1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -input: - | 1//2 -./calc.at:1491: $PREPARSER ./calc input - -stderr: -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - stderr: +./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y Starting parse Entering state 0 Reading a token @@ -239916,40 +239952,17 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) + $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -stderr: -input: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) - | 1 + 2 * 3 + !- ++ -./calc.at:1492: $PREPARSER ./calc input -stderr: -./calc.at:1491: "$PERL" -pi -e 'use strict; +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -239959,74 +239972,8 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' 
(1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none stderr: +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Werror Starting parse Entering state 0 Reading a token @@ -240038,69 +239985,14 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): - $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) -636. regression.at:661: testing Web2c Actions ... -./regression.at:674: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y -635. regression.at:480: testing Web2c Report ... -./regression.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v input.y -./calc.at:1491: cat stderr -637. regression.at:812: testing Useless Tokens ... 
-./regression.at:912: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -o input.c input.y -./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none -./calc.at:1492: "$PERL" -pi -e 'use strict; +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) +./calc.at:1492: cat stderr +stderr: +stdout: +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -240110,33 +240002,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1494: cat stderr input: - | error -./regression.at:437: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1491: $PREPARSER ./calc input -stderr: -./regression.at:679: cat tables.c -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) -./calc.at:1492: cat stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -636. regression.at:661: ok -input: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +628. regression.at:127: ok | (#) + (#) = 2222 ./calc.at:1492: $PREPARSER ./calc input -./regression.at:506: cat input.output -./regression.at:438: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: Starting parse Entering state 0 @@ -240235,25 +240105,13 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -635. regression.at:480: - ok -./calc.at:1491: cat stderr -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: input: - | 1 = 2 = 3 -./calc.at:1491: $PREPARSER ./calc input + | 1//2 +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: $PREPARSER ./calc input +633. regression.at:438: testing Token definitions: parse.error=verbose ... 
+./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y + stderr: Starting parse Entering state 0 @@ -240266,24 +240124,18 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '/' (1.3: ) +stderr: +632. regression.at:437: testing Token definitions: parse.error=detailed ... +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o input.c input.y Starting parse Entering state 0 Reading a token @@ -240381,22 +240233,9 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 -./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - Starting parse Entering state 0 Reading a token @@ -240408,28 +240247,30 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
+Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) +Cleanup: discarding lookahead token '/' (1.3: ) +input.y:3.1-5: error: useless precedence and associativity for "||" [-Werror=precedence] +input.y:3.1-5: error: useless precedence and associativity for "<=" [-Werror=precedence] +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1492: cat stderr -638. regression.at:1143: testing Dancer ... -./regression.at:1143: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y -./calc.at:1491: "$PERL" -pi -e 'use strict; +./regression.at:357: sed 's,.*/$,,' stderr 1>&2 +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -240442,8 +240283,9 @@ input: | (1 + #) = 1111 ./calc.at:1492: $PREPARSER ./calc input +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror +./calc.at:1494: cat stderr stderr: -./calc.at:1491: cat stderr Starting parse Entering state 0 Reading a token @@ -240521,15 +240363,22 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=error +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Werror ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -639. regression.at:1144: testing Dancer %glr-parser ... -./regression.at:1144: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 input: - | - | +1 + | error +./calc.at:1494: $PREPARSER ./calc input +634. regression.at:447: testing Characters Escapes ... 
+./regression.at:465: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y stderr: -./calc.at:1491: $PREPARSER ./calc input +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) Starting parse Entering state 0 Reading a token @@ -240607,6 +240456,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} @@ -240621,54 +240471,51 @@ Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 -./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +stderr: +input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:25.8-14: note: previous declaration + 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y -Wnone,none -Werror --trace=none ./calc.at:1492: cat stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1143: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -input: +./regression.at:438: sed 's,.*/$,,' stderr 1>&2 stderr: +input.y:26.8-14: error: symbol SPECIAL redeclared [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~ +input.y:25.8-14: note: previous declaration + 25 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" 
+ | ^~~~~~~ +input.y:26.16-63: error: symbol "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" used more than once as a literal string [-Werror=other] + 26 | %token SPECIAL "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +./calc.at:1494: cat stderr +input: | (# + 1) = 1111 +./regression.at:437: sed 's,.*/$,,' stderr 1>&2 ./calc.at:1492: $PREPARSER ./calc input -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) -./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y stderr: -./regression.at:1144: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +input: +./regression.at:466: $CC $CFLAGS $CPPFLAGS -c -o input.o input.c Starting parse Entering state 0 Reading a token @@ -240738,20 +240585,42 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 + | 1 = 2 = 3 +./calc.at:1494: $PREPARSER ./calc input ./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./calc.at:1491: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=error +stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -240821,15 +240690,47 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: $PREPARSER ./calc /dev/null -./regression.at:917: cat tables.c +./regression.at:357: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -v -Wall -o input.c input.y --warnings=none -Werror --trace=none stderr: Starting parse Entering state 0 Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 ./calc.at:1492: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -240840,22 +240741,15 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -637. regression.at:812: ok -stderr: +./torture.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./calc.at:1492: cat stderr -Starting parse -Entering state 0 -Reading a token -Now at end of input. -1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) stderr: +./calc.at:1494: cat stderr input: -stdout: | (1 + # + 1) = 1111 +stdout: ./calc.at:1492: $PREPARSER ./calc input -./calc.at:1494: "$PERL" -ne ' +./calc.at:1491: "$PERL" -ne ' chomp; print "$ARGV:$.: {$_}\n" if (# No starting/ending empty lines. @@ -240866,105 +240760,7 @@ || /\t/ )' calc.cc calc.hh -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering 
state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: cat stderr -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr input: -stderr: | 1 + 2 * 3 = 7 | 1 + 2 * -3 = -5 | @@ -240978,7 +240774,10 @@ | | 2^2^3 = 256 | (2^2)^3 = 64 -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input +stderr: +631. regression.at:345: ok +input: Starting parse Entering state 0 Reading a token @@ -241062,22 +240861,10 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1491: $PREPARSER ./calc input -stderr: -stderr: + | + | +1 +./calc.at:1494: $PREPARSER ./calc input stderr: -./calc.at:1492: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -241914,241 +241701,103 @@ Entering state 16 Cleanup: popping token end of input (14.1: ) Cleanup: popping nterm input (1.1-14.0: ) +stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token ')' (1.2: ) -1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Shifting token error (1.2: ) -Entering state 11 -Next token is token ')' (1.2: ) -Shifting token ')' (1.2: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.2: ) --> $$ = nterm exp (1.1-2: 1111) -Entering state 8 +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +Starting parse +Entering state 0 Reading a token -Next token is token '(' (1.6: ) -Shifting token '(' (1.6: ) +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.7: 1) -Shifting token number (1.7: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.7: 1) --> $$ = nterm exp (1.7: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.9: ) 
-Shifting token '+' (1.9: ) -Entering state 20 -Reading a token -Next token is token number (1.11: 1) -Shifting token number (1.11: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11: 1) --> $$ = nterm exp (1.11: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.13: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7: 1) - $2 = token '+' (1.9: ) - $3 = nterm exp (1.11: 1) --> $$ = nterm exp (1.7-11: 2) -Entering state 12 -Next token is token '+' (1.13: ) -Shifting token '+' (1.13: ) -Entering state 20 -Reading a token -Next token is token number (1.15: 1) -Shifting token number (1.15: 1) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15: 1) --> $$ = nterm exp (1.15: 1) -Entering state 29 -Reading a token -Next token is token '+' (1.17: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.7-11: 2) - $2 = token '+' (1.13: ) - $3 = nterm exp (1.15: 1) --> $$ = nterm exp (1.7-15: 3) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 -Next token is token '+' (1.17: ) -Shifting token '+' (1.17: ) -Entering state 20 -Reading a token -Next token is token ')' (1.18: ) -1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' -Error: popping token '+' (1.17: ) -Error: popping nterm exp (1.7-15: 3) -Shifting token error (1.7-18: ) -Entering state 11 -Next token is token ')' (1.18: ) -Shifting token ')' (1.18: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.6: ) - $2 = token error (1.7-18: ) - $3 = token ')' (1.18: ) --> $$ = nterm exp (1.6-18: 1111) -Entering state 29 Reading a token -Next token is token '+' (1.20: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-2: 1111) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6-18: 1111) --> $$ = nterm exp (1.1-18: 2222) -Entering state 8 -Next token is token '+' (1.20: ) -Shifting token '+' (1.20: ) +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token '(' (1.22: ) -Shifting token '(' (1.22: ) -Entering state 4 -Reading a token -Next token is token '*' (1.23: ) -1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Shifting token error (1.23: ) +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) Entering state 11 -Next token is token '*' (1.23: ) -Error: discarding token '*' (1.23: ) +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token -Next token is token '*' (1.25: ) -Error: discarding token '*' (1.25: ) +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) Reading a token -Next token is token '*' (1.27: ) -Error: discarding token '*' (1.27: ) +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) Reading a token -Next token is token ')' (1.28: ) +Next token is token ')' (1.11: ) Entering state 11 -Next token is token ')' (1.28: ) -Shifting token ')' (1.28: ) +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.22: ) - $2 = token error (1.23-27: ) - $3 = token ')' (1.28: ) --> $$ = nterm exp (1.22-28: 1111) -Entering state 29 -Reading a token -Next token is token '+' (1.30: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-18: 2222) - $2 = token '+' (1.20: ) - $3 = nterm exp (1.22-28: 1111) --> $$ = nterm exp (1.1-28: 3333) + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) Entering state 8 -Next token is token '+' (1.30: ) -Shifting token '+' (1.30: ) -Entering state 20 -Reading a token -Next token is token '(' (1.32: ) -Shifting token '(' (1.32: ) -Entering state 4 -Reading a token -Next token is token number (1.33: 1) -Shifting token number (1.33: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.33: 1) --> $$ = nterm exp (1.33: 1) -Entering state 12 -Reading a token -Next token is token '*' (1.35: ) -Shifting token '*' (1.35: ) -Entering state 21 -Reading a token -Next token is token number (1.37: 2) -Shifting token number (1.37: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.37: 2) --> $$ = nterm exp (1.37: 2) -Entering state 30 -Reading a token -Next token is token '*' (1.39: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.33: 1) - $2 = token '*' (1.35: ) - $3 = nterm exp (1.37: 2) --> $$ = nterm exp (1.33-37: 2) -Entering state 12 -Next token is token '*' (1.39: ) -Shifting token '*' (1.39: ) -Entering state 21 -Reading a token -Next token is token '*' (1.41: ) -1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
-Error: popping token '*' (1.39: ) -Error: popping nterm exp (1.33-37: 2) -Shifting token error (1.33-41: ) -Entering state 11 -Next token is token '*' (1.41: ) -Error: discarding token '*' (1.41: ) -Reading a token -Next token is token ')' (1.42: ) -Entering state 11 -Next token is token ')' (1.42: ) -Shifting token ')' (1.42: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.32: ) - $2 = token error (1.33-41: ) - $3 = token ')' (1.42: ) --> $$ = nterm exp (1.32-42: 1111) -Entering state 29 Reading a token -Next token is token '=' (1.44: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-28: 3333) - $2 = token '+' (1.30: ) - $3 = nterm exp (1.32-42: 1111) --> $$ = nterm exp (1.1-42: 4444) -Entering state 8 -Next token is token '=' (1.44: ) -Shifting token '=' (1.44: ) +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) Entering state 18 Reading a token -Next token is token number (1.46: 1) -Shifting token number (1.46: 1) +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.46: 1) --> $$ = nterm exp (1.46: 1) + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.47-2.0: ) +Next token is token '\n' (1.19-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-42: 4444) - $2 = token '=' (1.44: ) - $3 = nterm exp (1.46: 1) -1.1-46: error: 4444 != 1 --> $$ = nterm exp (1.1-46: 4444) + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) Entering state 8 -Next token is token '\n' (1.47-2.0: ) -Shifting token '\n' (1.47-2.0: ) +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-46: 4444) - $2 = token '\n' (1.47-2.0: ) + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -242161,16 +241810,32 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stdout: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: cat stderr -./regression.at:437: $PREPARSER ./input -stderr: + stderr: Starting parse Entering state 0 Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y -Wnone,none -Werror --trace=none +./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +Starting parse +Entering state 0 +Reading a token Next token is token number (1.1: 1) Shifting token number (1.1: 1) Entering state 1 @@ -242885,129 
+242550,494 @@ -> $$ = nterm exp (12.9-11: 256) Entering state 27 Reading a token -Next token is token '\n' (12.12-13.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (12.1-5: 256) - $2 = token '=' (12.7: ) - $3 = nterm exp (12.9-11: 256) --> $$ = nterm exp (12.1-11: 256) +Next token is token '\n' (12.12-13.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (12.1-5: 256) + $2 = token '=' (12.7: ) + $3 = nterm exp (12.9-11: 256) +-> $$ = nterm exp (12.1-11: 256) +Entering state 8 +Next token is token '\n' (12.12-13.0: ) +Shifting token '\n' (12.12-13.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (12.1-11: 256) + $2 = token '\n' (12.12-13.0: ) +-> $$ = nterm line (12.1-13.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-12.0: ) + $2 = nterm line (12.1-13.0: ) +-> $$ = nterm input (1.1-13.0: ) +Entering state 6 +Reading a token +Next token is token '(' (13.1: ) +Shifting token '(' (13.1: ) +Entering state 4 +Reading a token +Next token is token number (13.2: 2) +Shifting token number (13.2: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.2: 2) +-> $$ = nterm exp (13.2: 2) +Entering state 12 +Reading a token +Next token is token '^' (13.3: ) +Shifting token '^' (13.3: ) +Entering state 23 +Reading a token +Next token is token number (13.4: 2) +Shifting token number (13.4: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.4: 2) +-> $$ = nterm exp (13.4: 2) +Entering state 32 +Reading a token +Next token is token ')' (13.5: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.2: 2) + $2 = token '^' (13.3: ) + $3 = nterm exp (13.4: 2) +-> $$ = nterm exp (13.2-4: 4) +Entering state 12 +Next token is token ')' (13.5: ) +Shifting token ')' (13.5: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (13.1: ) + $2 = nterm exp (13.2-4: 4) + $3 = token ')' (13.5: ) +-> $$ = nterm exp (13.1-5: 4) +Entering state 8 +Reading a token +Next token is token '^' (13.6: ) +Shifting token '^' (13.6: ) +Entering state 23 +Reading a token +Next token is token number (13.7: 3) +Shifting token number (13.7: 3) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.7: 3) +-> $$ = nterm exp (13.7: 3) +Entering state 32 +Reading a token +Next token is token '=' (13.9: ) +Reducing stack 0 by rule 12 (line 103): + $1 = nterm exp (13.1-5: 4) + $2 = token '^' (13.6: ) + $3 = nterm exp (13.7: 3) +-> $$ = nterm exp (13.1-7: 64) +Entering state 8 +Next token is token '=' (13.9: ) +Shifting token '=' (13.9: ) +Entering state 18 +Reading a token +Next token is token number (13.11-12: 64) +Shifting token number (13.11-12: 64) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (13.11-12: 64) +-> $$ = nterm exp (13.11-12: 64) +Entering state 27 +Reading a token +Next token is token '\n' (13.13-14.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (13.1-7: 64) + $2 = token '=' (13.9: ) + $3 = nterm exp (13.11-12: 64) +-> $$ = nterm exp (13.1-12: 64) +Entering state 8 +Next token is token '\n' (13.13-14.0: ) +Shifting token '\n' (13.13-14.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (13.1-12: 64) + $2 = token '\n' (13.13-14.0: ) +-> $$ = nterm line (13.1-14.0: ) +Entering state 17 +Reducing stack 0 by rule 2 (line 70): + $1 = nterm input (1.1-13.0: ) + $2 = nterm line (13.1-14.0: ) +-> $$ = nterm input (1.1-14.0: ) +Entering state 6 +Reading 
a token +Now at end of input. +Shifting token end of input (14.1: ) +Entering state 16 +Cleanup: popping token end of input (14.1: ) +Cleanup: popping nterm input (1.1-14.0: ) +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + | 1 2 +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1492: cat stderr +./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 +./calc.at:1494: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) +./calc.at:1494: $PREPARSER ./calc /dev/null +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +634. regression.at:447: ok +input: + | (1 + 1) / (1 - 1) +./calc.at:1492: $PREPARSER ./calc input +stderr: +635. regression.at:480: testing Web2c Report ... +Starting parse +Entering state 0 +Reading a token +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +./existing.at:808: sed 's/^%define lr.type .*$//' input.y > input-lalr.y +stderr: +./regression.at:505: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v input.y +stderr: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token number (1.3: 2) +1.3: syntax error, unexpected number +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token number (1.3: 2) +./regression.at:437: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +./regression.at:438: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret -o input.c input.y --warnings=none -Werror --trace=none +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) 
+Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (12.12-13.0: ) -Shifting token '\n' (12.12-13.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (12.1-11: 256) - $2 = token '\n' (12.12-13.0: ) --> $$ = nterm line (12.1-13.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-12.0: ) - $2 = nterm line (12.1-13.0: ) --> $$ = nterm input (1.1-13.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token -Next token is token '(' (13.1: ) -Shifting token '(' (13.1: ) +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +stderr: +Starting parse +Entering state 0 +Reading a token +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (13.2: 2) -Shifting token number (13.2: 2) +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.2: 2) --> $$ = nterm exp (13.2: 2) + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) Entering state 12 Reading a token -Next token is token '^' (13.3: ) -Shifting token '^' (13.3: ) -Entering state 23 +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 Reading a token -Next token is token number (13.4: 2) -Shifting token number (13.4: 2) +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.4: 2) --> $$ = nterm exp (13.4: 2) -Entering state 32 + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 Reading a token -Next token is token ')' (13.5: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.2: 2) - $2 = token '^' (13.3: ) - $3 = nterm exp (13.4: 2) --> $$ = nterm exp (13.2-4: 4) +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) Entering state 12 -Next token is token ')' (13.5: ) -Shifting token ')' (13.5: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 26 Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (13.1: ) - $2 = nterm exp (13.2-4: 4) - $3 = token ')' (13.5: ) --> $$ = nterm exp (13.1-5: 4) + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) Entering state 8 Reading a token -Next token is token '^' (13.6: ) -Shifting token '^' (13.6: ) -Entering state 23 +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 Reading a token -Next token is token number (13.7: 3) -Shifting token number (13.7: 3) +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.7: 3) --> $$ = nterm exp (13.7: 3) -Entering state 32 + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 Reading a token -Next token is token '=' (13.9: ) -Reducing stack 0 by rule 12 (line 103): - $1 = nterm exp (13.1-5: 4) - $2 = token '^' (13.6: ) - $3 = nterm exp (13.7: 3) --> $$ = nterm exp (13.1-7: 64) -Entering state 8 -Next token is token '=' (13.9: ) -Shifting token '=' (13.9: ) -Entering state 18 +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 Reading a token -Next token is token number (13.11-12: 64) -Shifting token number (13.11-12: 64) +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (13.11-12: 64) --> $$ = nterm exp (13.11-12: 64) -Entering state 27 + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 Reading a token -Next token is token '\n' (13.13-14.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (13.1-7: 64) - $2 = token '=' (13.9: ) 
- $3 = nterm exp (13.11-12: 64) --> $$ = nterm exp (13.1-12: 64) +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) Entering state 8 -Next token is token '\n' (13.13-14.0: ) -Shifting token '\n' (13.13-14.0: ) +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (13.1-12: 64) - $2 = token '\n' (13.13-14.0: ) --> $$ = nterm line (13.1-14.0: ) -Entering state 17 -Reducing stack 0 by rule 2 (line 70): - $1 = nterm input (1.1-13.0: ) - $2 = nterm line (13.1-14.0: ) --> $$ = nterm input (1.1-14.0: ) + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) Entering state 6 Reading a token Now at end of input. -Shifting token end of input (14.1: ) +Shifting token end of input (2.1: ) Entering state 16 -Cleanup: popping token end of input (14.1: ) -Cleanup: popping nterm input (1.1-14.0: ) -stderr: +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: cat stderr +./calc.at:1494: cat stderr +./calc.at:1492: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./regression.at:506: cat input.output +./regression.at:437: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +input: +./calc.at:1492: cat stderr + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 +./calc.at:1494: $PREPARSER ./calc input +./regression.at:438: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + | 1//2 +./calc.at:1491: $PREPARSER ./calc input stderr: -syntax error, unexpected a, expecting ∃¬∩∪∀ -./regression.at:437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +635. regression.at:480: stderr: + ok Starting parse Entering state 0 Reading a token @@ -243255,25 +243285,40 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stdout: -input: -input: - | 1 2 -./regression.at:438: $PREPARSER ./input -640. regression.at:1145: testing Dancer lalr1.cc ... -./calc.at:1494: $PREPARSER ./calc input -./regression.at:1145: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.cc dancer.y - | (1 + 1) / (1 - 1) -./calc.at:1492: $PREPARSER ./calc input -stderr: -stderr: -syntax error, unexpected a, expecting "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" 
-./regression.at:438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '/' (1.3: ) +568. calc.at:1492: ok +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +636. regression.at:661: testing Web2c Actions ... +./regression.at:674: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -v -o input.c input.y stderr: input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +stdout: +stderr: stderr: +./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' + Starting parse Entering state 0 Reading a token @@ -243285,22 +243330,15 @@ -> $$ = nterm exp (1.1: 1) Entering state 8 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number +Next token is token '/' (1.2: ) +Shifting token '/' (1.2: ) +Entering state 22 +Reading a token +Next token is token '/' (1.3: ) +1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' +Error: popping token '/' (1.2: ) Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -632. regression.at:437: ok -stdout: -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +Cleanup: discarding lookahead token '/' (1.3: ) Starting parse Entering state 0 Reading a token @@ -243308,241 +243346,234 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token ')' (1.2: ) +1.2: syntax error, unexpected ')', expecting number or '-' or '(' or '!' 
+Shifting token error (1.2: ) +Entering state 11 +Next token is token ')' (1.2: ) +Shifting token ')' (1.2: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.2: ) +-> $$ = nterm exp (1.1-2: 1111) +Entering state 8 Reading a token Next token is token '+' (1.4: ) Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) +Next token is token '(' (1.6: ) +Shifting token '(' (1.6: ) +Entering state 4 +Reading a token +Next token is token number (1.7: 1) +Shifting token number (1.7: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 -Reading a token -Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) + $1 = token number (1.7: 1) +-> $$ = nterm exp (1.7: 1) Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) -Entering state 8 -Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 +Next token is token '+' (1.9: ) +Shifting token '+' (1.9: ) +Entering state 20 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.11: 1) +Shifting token number (1.11: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 + $1 = token number (1.11: 1) +-> $$ = nterm exp (1.11: 1) +Entering state 29 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '+' (1.13: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7: 1) + $2 = token '+' (1.9: ) + $3 = nterm exp (1.11: 1) +-> $$ = nterm exp (1.7-11: 2) +Entering state 12 +Next token is token '+' (1.13: ) +Shifting token '+' (1.13: ) +Entering state 20 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.15: 1) +Shifting token number (1.15: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 + $1 = token number (1.15: 1) +-> $$ = nterm exp (1.15: 1) +Entering state 29 Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token '+' (1.17: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.7-11: 2) + $2 = token '+' (1.13: ) + $3 = nterm exp (1.15: 1) +-> $$ = nterm exp (1.7-15: 3) Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 -Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor 
--> $$ = nterm exp (1.1-17: 2) -Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.17: ) +Shifting token '+' (1.17: ) +Entering state 20 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1492: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -633. regression.at:438: ok -./calc.at:1491: cat stderr -stderr: -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none -stderr: -./existing.at:808: grep '^State.*conflicts:' input.output -Starting parse -Entering state 0 +Next token is token ')' (1.18: ) +1.18: syntax error, unexpected ')', expecting number or '-' or '(' or '!' +Error: popping token '+' (1.17: ) +Error: popping nterm exp (1.7-15: 3) +Shifting token error (1.7-18: ) +Entering state 11 +Next token is token ')' (1.18: ) +Shifting token ')' (1.18: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.6: ) + $2 = token error (1.7-18: ) + $3 = token ')' (1.18: ) +-> $$ = nterm exp (1.6-18: 1111) +Entering state 29 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) +Next token is token '+' (1.20: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-2: 1111) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6-18: 1111) +-> $$ = nterm exp (1.1-18: 2222) Entering state 8 +Next token is token '+' (1.20: ) +Shifting token '+' (1.20: ) +Entering state 20 Reading a token -Next token is token number (1.3: 2) -1.3: syntax error, unexpected number -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token number (1.3: 2) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.22: ) +Shifting token '(' (1.22: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +Next token is token '*' (1.23: ) +1.23: syntax error, unexpected '*', expecting number or '-' or '(' or '!' 
+Shifting token error (1.23: ) +Entering state 11 +Next token is token '*' (1.23: ) +Error: discarding token '*' (1.23: ) Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Next token is token '*' (1.25: ) +Error: discarding token '*' (1.25: ) Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) +Next token is token '*' (1.27: ) +Error: discarding token '*' (1.27: ) +Reading a token +Next token is token ')' (1.28: ) +Entering state 11 +Next token is token ')' (1.28: ) +Shifting token ')' (1.28: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.22: ) + $2 = token error (1.23-27: ) + $3 = token ')' (1.28: ) +-> $$ = nterm exp (1.22-28: 1111) Entering state 29 Reading a token -Next token is token ')' (1.7: ) +Next token is token '+' (1.30: ) Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) + $1 = nterm exp (1.1-18: 2222) + $2 = token '+' (1.20: ) + $3 = nterm exp (1.22-28: 1111) +-> $$ = nterm exp (1.1-28: 3333) Entering state 8 +Next token is token '+' (1.30: ) +Shifting token '+' (1.30: ) +Entering state 20 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) +Next token is token '(' (1.32: ) +Shifting token '(' (1.32: ) Entering state 4 Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) +Next token is token number (1.33: 1) +Shifting token number (1.33: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) + $1 = token number (1.33: 1) +-> $$ = nterm exp (1.33: 1) Entering state 12 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '*' (1.35: ) +Shifting token '*' (1.35: ) +Entering state 21 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.37: 2) +Shifting token number (1.37: 2) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 + $1 = token number (1.37: 2) +-> $$ = nterm exp (1.37: 2) +Entering state 30 Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) +Next token is token '*' (1.39: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.33: 1) + $2 = token '*' (1.35: ) + $3 = nterm exp (1.37: 2) +-> $$ = nterm exp (1.33-37: 2) Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 +Next token is token '*' (1.39: ) +Shifting token '*' (1.39: ) +Entering state 21 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing 
stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '*' (1.41: ) +1.41: syntax error, unexpected '*', expecting number or '-' or '(' or '!' +Error: popping token '*' (1.39: ) +Error: popping nterm exp (1.33-37: 2) +Shifting token error (1.33-41: ) +Entering state 11 +Next token is token '*' (1.41: ) +Error: discarding token '*' (1.41: ) +Reading a token +Next token is token ')' (1.42: ) +Entering state 11 +Next token is token ')' (1.42: ) +Shifting token ')' (1.42: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.32: ) + $2 = token error (1.33-41: ) + $3 = token ')' (1.42: ) +-> $$ = nterm exp (1.32-42: 1111) +Entering state 29 +Reading a token +Next token is token '=' (1.44: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-28: 3333) + $2 = token '+' (1.30: ) + $3 = nterm exp (1.32-42: 1111) +-> $$ = nterm exp (1.1-42: 4444) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '=' (1.44: ) +Shifting token '=' (1.44: ) +Entering state 18 +Reading a token +Next token is token number (1.46: 1) +Shifting token number (1.46: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.46: 1) +-> $$ = nterm exp (1.46: 1) +Entering state 27 +Reading a token +Next token is token '\n' (1.47-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-42: 4444) + $2 = token '=' (1.44: ) + $3 = nterm exp (1.46: 1) +1.1-46: error: 4444 != 1 +-> $$ = nterm exp (1.1-46: 4444) +Entering state 8 +Next token is token '\n' (1.47-2.0: ) +Shifting token '\n' (1.47-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-46: 4444) + $2 = token '\n' (1.47-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -243556,8 +243587,8 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -./calc.at:1492: "$PERL" -pi -e 'use strict; +./regression.at:679: cat tables.c +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -243567,9 +243598,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:1145: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o dancer dancer.cc $LIBS - | (!!) + (1 2) = 1 -./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./existing.at:808: grep '^State.*conflicts:' input.output ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -243580,9 +243609,19 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +636. regression.at:661: ok +./calc.at:1491: cat stderr +./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1494: cat stderr +input: +input: + | (!!) 
+ (1 2) = 1 +./calc.at:1494: $PREPARSER ./calc input + | error ./calc.at:1491: $PREPARSER ./calc input -stderr: +stderr: +stderr: Starting parse Entering state 0 Reading a token @@ -243694,11 +243733,15 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1492: cat stderr -./calc.at:1494: cat stderr +Starting parse +Entering state 0 +Reading a token +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -568. calc.at:1492: ok -input: +stderr: stderr: Starting parse Entering state 0 @@ -243811,32 +243854,15 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | 1//2 -./calc.at:1494: $PREPARSER ./calc input -641. regression.at:1220: testing Expecting two tokens ... -./regression.at:1220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y -stderr: +638. regression.at:1143: testing Dancer ... +./regression.at:1143: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' -Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -./calc.at:1491: "$PERL" -pi -e 'use strict; +Next token is token invalid token (1.1: ) +1.1: syntax error, unexpected invalid token +Cleanup: discarding lookahead token invalid token (1.1: ) +./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -243846,39 +243872,7 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./calc.at:1491: cat stderr -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '/' (1.2: ) -Shifting token '/' (1.2: ) -Entering state 22 -Reading a token -Next token is token '/' (1.3: ) -1.3: syntax error, unexpected '/', expecting number or '-' or '(' or '!' 
-Error: popping token '/' (1.2: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '/' (1.3: ) -stderr: -input: - | (- *) + (1 2) = 1 -./calc.at:1491: $PREPARSER ./calc input -stdout: -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -./regression.at:1143: $PREPARSER ./dancer -stderr: -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -243888,6 +243882,51 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./calc.at:1494: cat stderr +637. regression.at:812: testing Useless Tokens ... +./regression.at:912: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-other -o input.c input.y +./calc.at:1491: cat stderr +input: + | (- *) + (1 2) = 1 +stderr: +./calc.at:1494: $PREPARSER ./calc input +639. regression.at:1144: testing Dancer %glr-parser ... +input: +input.y:66.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:170.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:175.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:180.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:188.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:202.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:207.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:221.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:300.10: error: empty rule without %empty [-Werror=empty-rule] +input.y:323.10: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 265 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:39.1-5: error: useless associativity for FUNC_CALL, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YNUMBER, use %precedence [-Werror=precedence] +input.y:44.1-5: error: useless associativity for YSTRING, use %precedence [-Werror=precedence] +input.y:42.1-9: error: useless precedence and associativity for APPEND_OP [-Werror=precedence] +input.y:33.1-6: error: useless associativity for ASSIGNOP, use %precedence [-Werror=precedence] +input.y:43.1-5: error: useless associativity for CONCAT_OP, use %precedence [-Werror=precedence] +input.y:37.1-5: error: useless precedence and associativity for LEX_GETLINE [-Werror=precedence] +input.y:38.1-9: error: useless associativity for LEX_IN, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for INCREMENT, use %precedence [-Werror=precedence] +input.y:49.1-5: error: useless associativity for DECREMENT, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_BUILTIN, use %precedence [-Werror=precedence] +input.y:39.1-5: error: useless associativity for LEX_LENGTH, use %precedence [-Werror=precedence] +input.y:40.1-9: error: useless precedence and associativity for ',' [-Werror=precedence] +input.y:47.1-6: error: useless associativity for '!', use %precedence [-Werror=precedence] +input.y:47.1-6: error: useless associativity for UNARY, use %precedence [-Werror=precedence] 
+input.y:50.1-5: error: useless associativity for '$', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless associativity for '(', use %precedence [-Werror=precedence] +input.y:51.1-5: error: useless precedence and associativity for ')' [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./regression.at:1144: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.c dancer.y +stderr: + | 1 = 2 = 3 +./calc.at:1491: $PREPARSER ./calc input Starting parse Entering state 0 Reading a token @@ -244005,13 +244044,38 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: sed 's,.*/$,,' stderr 1>&2 ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -syntax error, unexpected ':' -./regression.at:1143: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1220: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS -642. regression.at:1221: testing Expecting two tokens %glr-parser ... -./regression.at:1221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y -./calc.at:1494: cat stderr stderr: Starting parse Entering state 0 @@ -244130,20 +244194,48 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -638. regression.at:1143: ok -input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] -stdout: -./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -input: -stderr: - | error -643. regression.at:1222: testing Expecting two tokens lalr1.cc ... 
-./regression.at:1222: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.cc expect2.y -input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] -input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] -input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stdout: -./calc.at:1494: $PREPARSER ./calc input +Starting parse +Entering state 0 +Reading a token +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 +Reading a token +Next token is token '=' (1.3: ) +Shifting token '=' (1.3: ) +Entering state 18 +Reading a token +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 27 +Reading a token +Next token is token '=' (1.7: ) +1.7: syntax error, unexpected '=' +Error: popping nterm exp (1.5: 2) +Error: popping token '=' (1.3: ) +Error: popping nterm exp (1.1: 1) +Cleanup: discarding lookahead token '=' (1.7: ) +./regression.at:1143: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none ./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -244154,32 +244246,41 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) +./calc.at:1494: cat stderr ./calc.at:1491: cat stderr -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./existing.at:808: grep '^State.*conflicts:' input.output - -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: input: +input: + | + | +1 +./calc.at:1491: $PREPARSER ./calc input + | (* *) + (*) + (*) +./calc.at:1494: $PREPARSER ./calc input +stderr: +./regression.at:1144: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o dancer dancer.c $LIBS +stderr: Starting parse Entering state 0 Reading a token -Next token is token invalid token (1.1: ) -1.1: syntax error, unexpected invalid token -Cleanup: discarding lookahead token invalid token (1.1: ) - | (* *) + (*) + (*) -./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1491: $PREPARSER ./calc input -./regression.at:1221: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) stderr: +input-lalr.y: warning: 78 shift/reduce conflicts [-Wconflicts-sr] +input-lalr.y: warning: 10 reduce/reduce conflicts [-Wconflicts-rr] +input-lalr.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples Starting parse Entering state 0 Reading a token @@ -244295,20 +244396,38 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 +stdout: ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1222: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o expect2 expect2.cc $LIBS -./calc.at:1494: cat stderr +./existing.at:808: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] +stdout: +stderr: +./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' +stderr: +./existing.at:808: grep '^State.*conflicts:' input.output +Starting parse +Entering state 0 +Reading a token +Next token is token '\n' (1.1-2.0: ) +Shifting token '\n' (1.1-2.0: ) +Entering state 3 +Reducing stack 0 by rule 3 (line 74): + $1 = token '\n' (1.1-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Next token is token '+' (2.1: ) +2.1: syntax error, unexpected '+' +Error: popping nterm input (1.1-2.0: ) +Cleanup: discarding lookahead token '+' (2.1: ) stderr: +stdout: Starting parse Entering state 0 Reading a token @@ -244424,11 +244543,11 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -input: -644. regression.at:1230: testing Braced code in declaration in rules section ... -./regression.at:1261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - | 1 = 2 = 3 -./calc.at:1494: $PREPARSER ./calc input +stderr: +./existing.at:808: $PREPARSER ./input +stdout: +./regression.at:438: $PREPARSER ./input +stderr: ./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -244439,79 +244558,11 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) -./calc.at:1491: cat stderr -stderr: -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1262: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -stdout: -./existing.at:808: $PREPARSER ./input -stderr: -input: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = 
token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '=' (1.3: ) -Shifting token '=' (1.3: ) -Entering state 18 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 27 -Reading a token -Next token is token '=' (1.7: ) -1.7: syntax error, unexpected '=' -Error: popping nterm exp (1.5: 2) -Error: popping token '=' (1.3: ) -Error: popping nterm exp (1.1: 1) -Cleanup: discarding lookahead token '=' (1.7: ) ./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - | 1 + 2 * 3 + !+ ++ -./calc.at:1491: $PREPARSER ./calc input -619. existing.at:808: ok -stderr: +syntax error, unexpected a, expecting "\\\'\?\"\a\b\f\n\r\t\v\001\201\x001\x000081??!" ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -244522,76 +244573,26 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 +./regression.at:438: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: cat stderr +620. existing.at:808: ok +./calc.at:1494: cat stderr +./calc.at:1491: $PREPARSER ./calc /dev/null +stderr: +633. regression.at:438: ok Starting parse Entering state 0 Reading a token -Next token is token number (1.1: 1) -Shifting token number (1.1: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.1: 1) --> $$ = nterm exp (1.1: 1) -Entering state 8 -Reading a token -Next token is token '+' (1.3: ) -Shifting token '+' (1.3: ) -Entering state 20 -Reading a token -Next token is token number (1.5: 2) -Shifting token number (1.5: 2) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.5: 2) --> $$ = nterm exp (1.5: 2) -Entering state 29 -Reading a token -Next token is token '*' (1.7: ) -Shifting token '*' (1.7: ) -Entering state 21 -Reading a token -Next token is token number (1.9: 3) -Shifting token number (1.9: 3) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.9: 3) --> $$ = nterm exp (1.9: 3) -Entering state 30 -Reading a token -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 9 (line 92): - $1 = nterm exp (1.5: 2) - $2 = token '*' (1.7: ) - $3 = nterm exp (1.9: 3) --> $$ = nterm exp (1.5-9: 6) -Entering state 29 -Next token is token '+' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1: 1) - $2 = token '+' (1.3: ) - $3 = nterm exp (1.5-9: 6) --> $$ = nterm exp (1.1-9: 7) -Entering state 8 -Next token is token '+' (1.11: ) -Shifting token '+' (1.11: ) -Entering state 20 -Reading a token -Next token is token '!' (1.13: ) -Shifting token '!' (1.13: ) -Entering state 5 -Reading a token -Next token is token '+' (1.14: ) -Shifting token '+' (1.14: ) -Entering state 14 -Reducing stack 0 by rule 17 (line 108): - $1 = token '!' (1.13: ) - $2 = token '+' (1.14: ) -Cleanup: popping token '+' (1.11: ) -Cleanup: popping nterm exp (1.1-9: 7) +Now at end of input. 
+1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) +input: + | 1 + 2 * 3 + !+ ++ +./calc.at:1494: $PREPARSER ./calc input ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: cat stderr +stderr: stderr: -input: Starting parse Entering state 0 Reading a token @@ -244657,35 +244658,14 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) - | - | +1 -./calc.at:1494: $PREPARSER ./calc input -input: -stderr: Starting parse Entering state 0 Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) - | 1 + 2 * 3 + !- ++ -./calc.at:1491: $PREPARSER ./calc input +Now at end of input. +1.1: syntax error, unexpected end of input +Cleanup: discarding lookahead token end of input (1.1: ) + ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 -stderr: stderr: Starting parse Entering state 0 @@ -244744,37 +244724,30 @@ Shifting token '!' (1.13: ) Entering state 5 Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 13 -Reducing stack 0 by rule 18 (line 109): +Next token is token '+' (1.14: ) +Shifting token '+' (1.14: ) +Entering state 14 +Reducing stack 0 by rule 17 (line 108): $1 = token '!' (1.13: ) - $2 = token '-' (1.14: ) + $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Next token is token '\n' (1.1-2.0: ) -Shifting token '\n' (1.1-2.0: ) -Entering state 3 -Reducing stack 0 by rule 3 (line 74): - $1 = token '\n' (1.1-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Next token is token '+' (2.1: ) -2.1: syntax error, unexpected '+' -Error: popping nterm input (1.1-2.0: ) -Cleanup: discarding lookahead token '+' (2.1: ) +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +input: +./calc.at:1491: cat stderr + | 1 + 2 * 3 + !- ++ +./calc.at:1494: $PREPARSER ./calc input stderr: -./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +input: Starting parse Entering state 0 Reading a token @@ -244840,290 +244813,13 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./torture.at:394: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: cat stderr -./calc.at:1491: cat stderr -input: - | (#) + (#) = 2222 -645. regression.at:1291: testing String alias declared after use ... + | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 ./calc.at:1491: $PREPARSER ./calc input -./calc.at:1494: $PREPARSER ./calc /dev/null -stderr: -./regression.at:1304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stdout: +./regression.at:917: cat tables.c ./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./existing.at:808: $PREPARSER ./input -./regression.at:1220: $PREPARSER ./expect2 -stderr: stderr: stderr: stderr: -./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Starting parse -Entering state 0 -Reading a token -Now at end of input. 
-1.1: syntax error, unexpected end of input -Cleanup: discarding lookahead token end of input (1.1: ) -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) -Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token ')' (1.3: ) -Entering state 11 -Next token is token ')' (1.3: ) -Shifting token ')' (1.3: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2: ) - $3 = token ')' (1.3: ) --> $$ = nterm exp (1.1-3: 1111) -Entering state 8 -Reading a token -Next token is token '+' (1.5: ) -Shifting token '+' (1.5: ) -Entering state 20 -Reading a token -Next token is token '(' (1.7: ) -Shifting token '(' (1.7: ) -Entering state 4 -Reading a token -1.8: syntax error: invalid character: '#' -Next token is token error (1.8: ) -Shifting token error (1.8: ) -Entering state 11 -Next token is token error (1.8: ) -Error: discarding token error (1.8: ) -Reading a token -Next token is token ')' (1.9: ) -Entering state 11 -Next token is token ')' (1.9: ) -Shifting token ')' (1.9: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.7: ) - $2 = token error (1.8: ) - $3 = token ')' (1.9: ) --> $$ = nterm exp (1.7-9: 1111) -Entering state 29 -Reading a token -Next token is token '=' (1.11: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.1-3: 1111) - $2 = token '+' (1.5: ) - $3 = nterm exp (1.7-9: 1111) --> $$ = nterm exp (1.1-9: 2222) -Entering state 8 -Next token is token '=' (1.11: ) -Shifting token '=' (1.11: ) -Entering state 18 -Reading a token -Next token is token number (1.13-16: 2222) -Shifting token number (1.13-16: 2222) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.13-16: 2222) --> $$ = nterm exp (1.13-16: 2222) -Entering state 27 -Reading a token -Next token is token '\n' (1.17-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-9: 2222) - $2 = token '=' (1.11: ) - $3 = nterm exp (1.13-16: 2222) --> $$ = nterm exp (1.1-16: 2222) -Entering state 8 -Next token is token '\n' (1.17-2.0: ) -Shifting token '\n' (1.17-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-16: 2222) - $2 = token '\n' (1.17-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -syntax error, unexpected '+', expecting A or B -./regression.at:1220: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -620. existing.at:808: ok -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -641. regression.at:1220: ok -./calc.at:1494: cat stderr -./calc.at:1491: cat stderr -input: - | () + (1 + 1 + 1 +) + (* * *) + (1 * 2 * *) = 1 -./calc.at:1494: $PREPARSER ./calc input - -645. regression.at:1291: ok -input: - -stderr: Starting parse Entering state 0 Reading a token @@ -245371,89 +245067,94 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - | (1 + #) = 1111 -./calc.at:1491: $PREPARSER ./calc input -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: Starting parse Entering state 0 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) +Next token is token number (1.1: 1) +Shifting token number (1.1: 1) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 + $1 = token number (1.1: 1) +-> $$ = nterm exp (1.1: 1) +Entering state 8 Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token '+' (1.3: ) +Shifting token '+' (1.3: ) Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token ')' (1.7: ) -Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 +Next token is token number (1.5: 2) +Shifting token number (1.5: 2) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.5: 2) +-> $$ = nterm exp (1.5: 2) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 +Next token is token '*' (1.7: ) +Shifting token '*' (1.7: ) +Entering state 21 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.9: 3) +Shifting token number (1.9: 3) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 + $1 = token number (1.9: 3) +-> $$ = nterm exp (1.9: 3) +Entering state 30 Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 9 (line 92): + $1 = nterm exp (1.5: 2) + $2 = token '*' (1.7: ) + $3 = nterm exp (1.9: 3) +-> $$ = nterm exp (1.5-9: 6) +Entering state 29 +Next token is token '+' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1: 1) + $2 = token '+' (1.3: ) + $3 = nterm exp (1.5-9: 6) +-> $$ = nterm exp (1.1-9: 7) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line 
(1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 +Next token is token '+' (1.11: ) +Shifting token '+' (1.11: ) +Entering state 20 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) +Next token is token '!' (1.13: ) +Shifting token '!' (1.13: ) +Entering state 5 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 13 +Reducing stack 0 by rule 18 (line 109): + $1 = token '!' (1.13: ) + $2 = token '-' (1.14: ) +Cleanup: popping token '+' (1.11: ) +Cleanup: popping nterm exp (1.1-9: 7) +640. regression.at:1145: testing Dancer lalr1.cc ... +637. regression.at:812: ok +stdout: +./regression.at:437: $PREPARSER ./input +./regression.at:1145: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o dancer.cc dancer.y stderr: ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error, unexpected a, expecting ∃¬∩∪∀ +./regression.at:437: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +641. regression.at:1220: testing Expecting two tokens ... +./regression.at:1220: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y +stderr: +632. regression.at:437: ok +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -245702,6 +245403,22 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1494: cat stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 + +input: +./calc.at:1491: cat stderr + | (#) + (#) = 2222 +./calc.at:1494: $PREPARSER ./calc input stderr: Starting parse Entering state 0 @@ -245710,64 +245427,84 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) +Next token is token ')' (1.3: ) +Entering state 11 +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) +Entering state 8 +Reading a token +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) Entering state 20 Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) +Entering state 4 +Reading a token +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.9: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) 
-Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -245780,44 +245517,11 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./calc.at:1494: cat stderr -646. regression.at:1314: testing Extra lookahead sets in report ... -./regression.at:1329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y -stdout: -./existing.at:1460: $PREPARSER ./input -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -stderr: -syntax error, unexpected LEFT -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1491: cat stderr -input: input: -647. regression.at:1355: testing Token number in precedence declaration ... - | (# + 1) = 1111 -./calc.at:1491: $PREPARSER ./calc input | (!!) + (1 2) = 1 -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1220: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS stderr: Starting parse Entering state 0 @@ -245930,8 +245634,8 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./regression.at:1145: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o dancer dancer.cc $LIBS stderr: -622. 
existing.at:1460: ok Starting parse Entering state 0 Reading a token @@ -245946,125 +245650,77 @@ Next token is token error (1.2: ) Error: discarding token error (1.2: ) Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) -Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.3: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.3: ) +Shifting token ')' (1.3: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) -Entering state 18 -Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.15-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $2 = token error (1.2: ) + $3 = token ')' (1.3: ) +-> $$ = nterm exp (1.1-3: 1111) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 Reading a token -Now at end of input. -Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1330: sed -n '/^State 1$/,/^State 2$/p' input.output -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -648. regression.at:1408: testing parse-gram.y: LALR = IELR ... -646. 
regression.at:1314: ok -stderr: -Starting parse -Entering state 0 +Next token is token '+' (1.5: ) +Shifting token '+' (1.5: ) +Entering state 20 Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) +Next token is token '(' (1.7: ) +Shifting token '(' (1.7: ) Entering state 4 Reading a token -1.2: syntax error: invalid character: '#' -Next token is token error (1.2: ) -Shifting token error (1.2: ) +1.8: syntax error: invalid character: '#' +Next token is token error (1.8: ) +Shifting token error (1.8: ) Entering state 11 -Next token is token error (1.2: ) -Error: discarding token error (1.2: ) -Reading a token -Next token is token '+' (1.4: ) -Error: discarding token '+' (1.4: ) -Reading a token -Next token is token number (1.6: 1) -Error: discarding token number (1.6: 1) +Next token is token error (1.8: ) +Error: discarding token error (1.8: ) Reading a token -Next token is token ')' (1.7: ) +Next token is token ')' (1.9: ) Entering state 11 -Next token is token ')' (1.7: ) -Shifting token ')' (1.7: ) +Next token is token ')' (1.9: ) +Shifting token ')' (1.9: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-6: ) - $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 1111) -Entering state 8 + $1 = token '(' (1.7: ) + $2 = token error (1.8: ) + $3 = token ')' (1.9: ) +-> $$ = nterm exp (1.7-9: 1111) +Entering state 29 Reading a token -Next token is token '=' (1.9: ) -Shifting token '=' (1.9: ) +Next token is token '=' (1.11: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.1-3: 1111) + $2 = token '+' (1.5: ) + $3 = nterm exp (1.7-9: 1111) +-> $$ = nterm exp (1.1-9: 2222) +Entering state 8 +Next token is token '=' (1.11: ) +Shifting token '=' (1.11: ) Entering state 18 Reading a token -Next token is token number (1.11-14: 1111) -Shifting token number (1.11-14: 1111) +Next token is token number (1.13-16: 2222) +Shifting token number (1.13-16: 2222) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.11-14: 1111) --> $$ = nterm exp (1.11-14: 1111) + $1 = token number (1.13-16: 2222) +-> $$ = nterm exp (1.13-16: 2222) Entering state 27 Reading a token -Next token is token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-7: 1111) - $2 = token '=' (1.9: ) - $3 = nterm exp (1.11-14: 1111) --> $$ = nterm exp (1.1-14: 1111) + $1 = nterm exp (1.1-9: 2222) + $2 = token '=' (1.11: ) + $3 = nterm exp (1.13-16: 2222) +-> $$ = nterm exp (1.1-16: 2222) Entering state 8 -Next token is token '\n' (1.15-2.0: ) -Shifting token '\n' (1.15-2.0: ) +Next token is token '\n' (1.17-2.0: ) +Shifting token '\n' (1.17-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-14: 1111) - $2 = token '\n' (1.15-2.0: ) + $1 = nterm exp (1.1-16: 2222) + $2 = token '\n' (1.17-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -246077,6 +245733,9 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: Starting parse Entering state 0 Reading a token @@ -246188,8 +245847,6 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - -./regression.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export 
NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=lalr input.y ./calc.at:1494: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -246210,104 +245867,16 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: cat stderr +./existing.at:1460: sed 's/^%define lr.type .*$//' input.y > input-lalr.y ./calc.at:1491: cat stderr - -input: +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all,no-cex input-lalr.y +./calc.at:1494: cat stderr input: - | (1 + # + 1) = 1111 -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror -./calc.at:1491: $PREPARSER ./calc input -stderr: | (- *) + (1 2) = 1 +./calc.at:1491: $PREPARSER ./calc input +643. regression.at:1222: testing Expecting two tokens lalr1.cc ... +./regression.at:1222: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.cc expect2.y stderr: -./calc.at:1494: $PREPARSER ./calc input -stdout: -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token '(' (1.1: ) -Shifting token '(' (1.1: ) -Entering state 4 -Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 -Reading a token -Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 -Reading a token -1.6: syntax error: invalid character: '#' -Next token is token error (1.6: ) -Error: popping token '+' (1.4: ) -Error: popping nterm exp (1.2: 1) -Shifting token error (1.2-6: ) -Entering state 11 -Next token is token error (1.6: ) -Error: discarding token error (1.6: ) -Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) -Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) -Entering state 25 -Reducing stack 0 by rule 14 (line 105): - $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) -Entering state 8 -Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) -Entering state 18 -Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) -Entering state 27 -Reading a token -Next token is token '\n' (1.19-2.0: ) -Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) -Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) -Entering state 24 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 = token '\n' (1.19-2.0: ) --> $$ = nterm line (1.1-2.0: ) -Entering state 7 -Reducing stack 0 by rule 1 (line 69): - $1 = nterm line (1.1-2.0: ) --> $$ = nterm input (1.1-2.0: ) -Entering state 6 -Reading a token -Now at end of input. 
-Shifting token end of input (2.1: ) -Entering state 16 -Cleanup: popping token end of input (2.1: ) -Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:1263: $PREPARSER ./input --debug Starting parse Entering state 0 Reading a token @@ -246424,28 +245993,13 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -649. regression.at:1430: testing parse.error=verbose and YYSTACK_USE_ALLOCA ... -./regression.at:1481: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reducing stack by rule 1 (line 20): --> $$ = nterm start () -Entering state 1 -Stack now 0 1 -Reading a token -Next token is token 'a' (PRINTER) -syntax error, unexpected 'a', expecting end of file -Error: popping nterm start () -Stack now 0 -Cleanup: discarding lookahead token 'a' (PRINTER) -DESTRUCTOR -Stack now 0 +input: ./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1263: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +642. regression.at:1221: testing Expecting two tokens %glr-parser ... + | (1 + #) = 1111 +./calc.at:1494: $PREPARSER ./calc input stderr: +./regression.at:1221: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o expect2.c expect2.y Starting parse Entering state 0 Reading a token @@ -246474,49 +246028,43 @@ Next token is token error (1.6: ) Error: discarding token error (1.6: ) Reading a token -Next token is token '+' (1.8: ) -Error: discarding token '+' (1.8: ) -Reading a token -Next token is token number (1.10: 1) -Error: discarding token number (1.10: 1) -Reading a token -Next token is token ')' (1.11: ) +Next token is token ')' (1.7: ) Entering state 11 -Next token is token ')' (1.11: ) -Shifting token ')' (1.11: ) +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) Entering state 25 Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = token error (1.2-10: ) - $3 = token ')' (1.11: ) --> $$ = nterm exp (1.1-11: 1111) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '=' (1.13: ) -Shifting token '=' (1.13: ) +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) Entering state 18 Reading a token -Next token is token number (1.15-18: 1111) -Shifting token number (1.15-18: 1111) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.15-18: 1111) --> $$ = nterm exp (1.15-18: 1111) + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) Entering state 27 Reading a token -Next token is token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) Reducing stack 0 by rule 6 (line 80): - $1 = nterm exp (1.1-11: 1111) - $2 = token '=' (1.13: ) - $3 = nterm exp (1.15-18: 1111) --> $$ = nterm exp (1.1-18: 1111) + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.19-2.0: ) -Shifting token '\n' (1.19-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-18: 1111) - $2 
= token '\n' (1.19-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -246530,8 +246078,6 @@ Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) stderr: -650. regression.at:1504: testing parse.error=verbose overflow ... -./regression.at:1604: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y Starting parse Entering state 0 Reading a token @@ -246648,11 +246194,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -644. regression.at:1230: ok -stderr: -input.y:24.5-19: error: rule useless in parser due to conflicts [-Werror=other] -input.y:28.5-19: error: rule useless in parser due to conflicts [-Werror=other] -input.y:18.1-5: error: useless precedence and associativity for TK1 [-Werror=precedence] +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { @@ -246663,29 +246205,6 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 - -./calc.at:1491: cat stderr -./calc.at:1494: cat stderr -./regression.at:1388: sed 's,.*/$,,' stderr 1>&2 -./regression.at:1482: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -input: - | (1 + 1) / (1 - 1) -input: -./calc.at:1491: $PREPARSER ./calc input - | (* *) + (*) + (*) -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error -./calc.at:1494: $PREPARSER ./calc input -stderr: stderr: Starting parse Entering state 0 @@ -246706,90 +246225,52 @@ Shifting token '+' (1.4: ) Entering state 20 Reading a token -Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +Entering state 11 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) 
-Entering state 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -246802,11 +246283,34 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./calc.at:1491: cat stderr +./regression.at:1222: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o expect2 expect2.cc $LIBS +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 +stderr: +input: + | (* *) + (*) + (*) +./calc.at:1491: $PREPARSER ./calc input +stderr: stdout: -./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./calc.at:1494: cat stderr +./existing.at:808: $PREPARSER ./input +./existing.at:1460: $PREPARSER ./input stderr: -./regression.at:1144: $PREPARSER ./dancer +stderr: +stderr: +syntax error, unexpected LEFT +./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Reading a token @@ -246922,9 +246426,18 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) +619. existing.at:808: ok +stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +input: + | (# + 1) = 1111 +./calc.at:1494: $PREPARSER ./calc input +./regression.at:1143: $PREPARSER ./dancer stderr: stderr: -syntax error, unexpected ':' +622. existing.at:1460: ./regression.at:1221: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o expect2 expect2.c $LIBS + ok Starting parse Entering state 0 Reading a token @@ -246932,102 +246445,56 @@ Shifting token '(' (1.1: ) Entering state 4 Reading a token -Next token is token number (1.2: 1) -Shifting token number (1.2: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.2: 1) --> $$ = nterm exp (1.2: 1) -Entering state 12 +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) Reading a token Next token is token '+' (1.4: ) -Shifting token '+' (1.4: ) -Entering state 20 +Error: discarding token '+' (1.4: ) Reading a token Next token is token number (1.6: 1) -Shifting token number (1.6: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.6: 1) --> $$ = nterm exp (1.6: 1) -Entering state 29 +Error: discarding token number (1.6: 1) Reading a token Next token is token ')' (1.7: ) -Reducing stack 0 by rule 7 (line 90): - $1 = nterm exp (1.2: 1) - $2 = token '+' (1.4: ) - $3 = nterm exp (1.6: 1) --> $$ = nterm exp (1.2-6: 2) -Entering state 12 +Entering state 11 Next token is token ')' (1.7: ) Shifting token ')' (1.7: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): +Entering state 25 +Reducing stack 0 by rule 14 (line 105): $1 = token '(' (1.1: ) - $2 = nterm exp (1.2-6: 2) + $2 = token error (1.2-6: ) $3 = token ')' (1.7: ) --> $$ = nterm exp (1.1-7: 2) +-> $$ = nterm exp (1.1-7: 1111) Entering state 8 Reading a token -Next token is token '/' (1.9: ) -Shifting token '/' (1.9: ) -Entering state 22 -Reading a token -Next token is token '(' (1.11: ) -Shifting token '(' (1.11: ) -Entering state 4 -Reading a token -Next token is token number (1.12: 1) -Shifting token number (1.12: 1) -Entering state 1 -Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.12: 1) --> $$ = nterm exp (1.12: 1) -Entering state 12 -Reading a token -Next token is token '-' (1.14: ) -Shifting token '-' (1.14: ) -Entering state 19 +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 Reading a token -Next token is token number (1.16: 1) -Shifting token number (1.16: 1) +Next token is token number (1.11-14: 1111) +Shifting token number 
(1.11-14: 1111) Entering state 1 Reducing stack 0 by rule 5 (line 79): - $1 = token number (1.16: 1) --> $$ = nterm exp (1.16: 1) -Entering state 28 -Reading a token -Next token is token ')' (1.17: ) -Reducing stack 0 by rule 8 (line 91): - $1 = nterm exp (1.12: 1) - $2 = token '-' (1.14: ) - $3 = nterm exp (1.16: 1) --> $$ = nterm exp (1.12-16: 0) -Entering state 12 -Next token is token ')' (1.17: ) -Shifting token ')' (1.17: ) -Entering state 26 -Reducing stack 0 by rule 13 (line 104): - $1 = token '(' (1.11: ) - $2 = nterm exp (1.12-16: 0) - $3 = token ')' (1.17: ) --> $$ = nterm exp (1.11-17: 0) -Entering state 31 + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 Reading a token -Next token is token '\n' (1.18-2.0: ) -Reducing stack 0 by rule 10 (line 93): - $1 = nterm exp (1.1-7: 2) - $2 = token '/' (1.9: ) - $3 = nterm exp (1.11-17: 0) -1.11-17: error: null divisor --> $$ = nterm exp (1.1-17: 2) +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) Entering state 8 -Next token is token '\n' (1.18-2.0: ) -Shifting token '\n' (1.18-2.0: ) +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) Entering state 24 Reducing stack 0 by rule 4 (line 75): - $1 = nterm exp (1.1-17: 2) - $2 = token '\n' (1.18-2.0: ) + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) -> $$ = nterm line (1.1-2.0: ) Entering state 7 Reducing stack 0 by rule 1 (line 69): @@ -247040,26 +246507,8 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:1144: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -651. regression.at:1628: testing LAC: Exploratory stack ... -639. regression.at:1144: ok -./calc.at:1491: "$PERL" -pi -e 'use strict; - s{syntax error on token \[(.*?)\] \(expected: (.*)\)} - { - my $unexp = $1; - my @exps = $2 =~ /\[(.*?)\]/g; - ($#exps && $#exps < 4) - ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" - : "syntax error, unexpected $unexp"; - }eg -' expout || exit 77 -./regression.at:1611: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1713: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ - -Dparse.lac.es-capacity-initial=1 \ - -Dparse.lac.memory-trace=full -o input.c input.y -stderr: stderr: +syntax error, unexpected ':' Starting parse Entering state 0 Reading a token @@ -247175,11 +246624,10 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1491: cat stderr -input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] -stdout: -./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' -./calc.at:1494: "$PERL" -pi -e 'use strict; +./regression.at:1143: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -247189,17 +246637,95 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -566. calc.at:1491: ok -./calc.at:1494: cat stderr +stderr: +638. 
regression.at:1143: ok +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +1.2: syntax error: invalid character: '#' +Next token is token error (1.2: ) +Shifting token error (1.2: ) +Entering state 11 +Next token is token error (1.2: ) +Error: discarding token error (1.2: ) +Reading a token +Next token is token '+' (1.4: ) +Error: discarding token '+' (1.4: ) +Reading a token +Next token is token number (1.6: 1) +Error: discarding token number (1.6: 1) +Reading a token +Next token is token ')' (1.7: ) +Entering state 11 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-6: ) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.9: ) +Shifting token '=' (1.9: ) +Entering state 18 +Reading a token +Next token is token number (1.11-14: 1111) +Shifting token number (1.11-14: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.11-14: 1111) +-> $$ = nterm exp (1.11-14: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.15-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-7: 1111) + $2 = token '=' (1.9: ) + $3 = nterm exp (1.11-14: 1111) +-> $$ = nterm exp (1.1-14: 1111) +Entering state 8 +Next token is token '\n' (1.15-2.0: ) +Shifting token '\n' (1.15-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-14: 1111) + $2 = token '\n' (1.15-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: cat stderr -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none -./regression.at:1713: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS input: -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? 
"syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 | 1 + 2 * 3 + !+ ++ -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input stderr: +./calc.at:1494: cat stderr Starting parse Entering state 0 Reading a token @@ -247265,7 +246791,11 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input: + | (1 + # + 1) = 1111 +./calc.at:1494: $PREPARSER ./calc input +stderr: stderr: Starting parse Entering state 0 @@ -247332,12 +246862,97 @@ $2 = token '+' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) input: +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr | 1 + 2 * 3 + !- ++ -./calc.at:1494: $PREPARSER ./calc input +./calc.at:1491: $PREPARSER ./calc input +644. regression.at:1230: testing Braced code in declaration in rules section ... 
+./regression.at:1261: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: stderr: -652. regression.at:1739: testing LAC: Memory exhaustion ... -./regression.at:1771: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y Starting parse Entering state 0 Reading a token @@ -247403,9 +247018,101 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +1.6: syntax error: invalid character: '#' +Next token is token error (1.6: ) +Error: popping token '+' (1.4: ) +Error: popping nterm exp (1.2: 1) +Shifting token error (1.2-6: ) +Entering state 11 +Next token is token error (1.6: ) +Error: discarding token error (1.6: ) +Reading a token +Next token is token '+' (1.8: ) +Error: discarding token '+' (1.8: ) +Reading a token +Next token is token number (1.10: 1) +Error: discarding token number (1.10: 1) +Reading a token +Next token is token ')' (1.11: ) +Entering state 11 +Next token is token ')' (1.11: ) +Shifting token ')' (1.11: ) +Entering state 25 +Reducing stack 0 by rule 14 (line 105): + $1 = token '(' (1.1: ) + $2 = token error (1.2-10: ) + $3 = token ')' (1.11: ) +-> $$ = nterm exp (1.1-11: 1111) +Entering state 8 +Reading a token +Next token is token '=' (1.13: ) +Shifting token '=' (1.13: ) +Entering state 18 +Reading a token +Next token is token number (1.15-18: 1111) +Shifting token number (1.15-18: 1111) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.15-18: 1111) +-> $$ = nterm exp (1.15-18: 1111) +Entering state 27 +Reading a token +Next token is token '\n' (1.19-2.0: ) +Reducing stack 0 by rule 6 (line 80): + $1 = nterm exp (1.1-11: 1111) + $2 = token '=' (1.13: ) + $3 = nterm exp (1.15-18: 1111) +-> $$ = nterm exp (1.1-18: 1111) +Entering state 8 +Next token is token '\n' (1.19-2.0: ) +Shifting token '\n' (1.19-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-18: 1111) + $2 = token '\n' (1.19-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. 
+Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 Starting parse Entering state 0 Reading a token @@ -247471,8 +247178,12 @@ $2 = token '-' (1.14: ) Cleanup: popping token '+' (1.11: ) Cleanup: popping nterm exp (1.1-9: 7) -./regression.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=ielr input.y -./calc.at:1494: "$PERL" -pi -e 'use strict; +646. regression.at:1314: testing Extra lookahead sets in report ... +645. regression.at:1291: testing String alias declared after use ... +./regression.at:1304: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1494: cat stderr +./regression.at:1329: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret --report=all input.y +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -247482,15 +247193,262 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -653. regression.at:1874: testing Lex and parse params: yacc.c ... -./regression.at:1874: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./calc.at:1494: cat stderr +stderr: +stdout: +./regression.at:1262: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS input: - | (#) + (#) = 2222 +./regression.at:1220: $PREPARSER ./expect2 +./calc.at:1491: cat stderr + | (1 + 1) / (1 - 1) ./calc.at:1494: $PREPARSER ./calc input -./regression.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1874: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: +./regression.at:1330: sed -n '/^State 1$/,/^State 2$/p' input.output +stderr: +syntax error, unexpected '+', expecting A or B +./regression.at:1220: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = 
token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input: +646. regression.at:1314: ok +./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + | (#) + (#) = 2222 +./calc.at:1491: $PREPARSER ./calc input +stderr: +stderr: +641. 
regression.at:1220: ok +Starting parse +Entering state 0 +Reading a token +Next token is token '(' (1.1: ) +Shifting token '(' (1.1: ) +Entering state 4 +Reading a token +Next token is token number (1.2: 1) +Shifting token number (1.2: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.2: 1) +-> $$ = nterm exp (1.2: 1) +Entering state 12 +Reading a token +Next token is token '+' (1.4: ) +Shifting token '+' (1.4: ) +Entering state 20 +Reading a token +Next token is token number (1.6: 1) +Shifting token number (1.6: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.6: 1) +-> $$ = nterm exp (1.6: 1) +Entering state 29 +Reading a token +Next token is token ')' (1.7: ) +Reducing stack 0 by rule 7 (line 90): + $1 = nterm exp (1.2: 1) + $2 = token '+' (1.4: ) + $3 = nterm exp (1.6: 1) +-> $$ = nterm exp (1.2-6: 2) +Entering state 12 +Next token is token ')' (1.7: ) +Shifting token ')' (1.7: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.1: ) + $2 = nterm exp (1.2-6: 2) + $3 = token ')' (1.7: ) +-> $$ = nterm exp (1.1-7: 2) +Entering state 8 +Reading a token +Next token is token '/' (1.9: ) +Shifting token '/' (1.9: ) +Entering state 22 +Reading a token +Next token is token '(' (1.11: ) +Shifting token '(' (1.11: ) +Entering state 4 +Reading a token +Next token is token number (1.12: 1) +Shifting token number (1.12: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.12: 1) +-> $$ = nterm exp (1.12: 1) +Entering state 12 +Reading a token +Next token is token '-' (1.14: ) +Shifting token '-' (1.14: ) +Entering state 19 +Reading a token +Next token is token number (1.16: 1) +Shifting token number (1.16: 1) +Entering state 1 +Reducing stack 0 by rule 5 (line 79): + $1 = token number (1.16: 1) +-> $$ = nterm exp (1.16: 1) +Entering state 28 +Reading a token +Next token is token ')' (1.17: ) +Reducing stack 0 by rule 8 (line 91): + $1 = nterm exp (1.12: 1) + $2 = token '-' (1.14: ) + $3 = nterm exp (1.16: 1) +-> $$ = nterm exp (1.12-16: 0) +Entering state 12 +Next token is token ')' (1.17: ) +Shifting token ')' (1.17: ) +Entering state 26 +Reducing stack 0 by rule 13 (line 104): + $1 = token '(' (1.11: ) + $2 = nterm exp (1.12-16: 0) + $3 = token ')' (1.17: ) +-> $$ = nterm exp (1.11-17: 0) +Entering state 31 +Reading a token +Next token is token '\n' (1.18-2.0: ) +Reducing stack 0 by rule 10 (line 93): + $1 = nterm exp (1.1-7: 2) + $2 = token '/' (1.9: ) + $3 = nterm exp (1.11-17: 0) +1.11-17: error: null divisor +-> $$ = nterm exp (1.1-17: 2) +Entering state 8 +Next token is token '\n' (1.18-2.0: ) +Shifting token '\n' (1.18-2.0: ) +Entering state 24 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm exp (1.1-17: 2) + $2 = token '\n' (1.18-2.0: ) +-> $$ = nterm line (1.1-2.0: ) +Entering state 7 +Reducing stack 0 by rule 1 (line 69): + $1 = nterm line (1.1-2.0: ) +-> $$ = nterm input (1.1-2.0: ) +Entering state 6 +Reading a token +Now at end of input. +Shifting token end of input (2.1: ) +Entering state 16 +Cleanup: popping token end of input (2.1: ) +Cleanup: popping nterm input (1.1-2.0: ) +input-lalr.y:471.11-48: warning: rule useless in parser due to conflicts [-Wother] +stdout: +645. 
regression.at:1291: ok +stderr: +./existing.at:1460: diff -u input-lalr.output input.output | sed -n '/^@@/,$p' | sed 's/^ $//' Starting parse Entering state 0 Reading a token @@ -247588,9 +247546,20 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./calc.at:1494: "$PERL" -pi -e 'use strict; + s{syntax error on token \[(.*?)\] \(expected: (.*)\)} + { + my $unexp = $1; + my @exps = $2 =~ /\[(.*?)\]/g; + ($#exps && $#exps < 4) + ? "syntax error, unexpected $unexp, expecting @{[join(\" or \", @exps)]}" + : "syntax error, unexpected $unexp"; + }eg +' expout || exit 77 stderr: + Starting parse Entering state 0 Reading a token @@ -247688,11 +247657,10 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -stdout: -./regression.at:1483: $PREPARSER ./input -stderr: -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1494: cat stderr + +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -247702,21 +247670,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B -syntax error, unexpected end of file, expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B -./regression.at:1483: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: cat stderr -649. regression.at:1430: ok -stderr: +570. calc.at:1494: ok +./calc.at:1491: cat stderr input: -stdout: + | (1 + #) = 1111 -./calc.at:1494: $PREPARSER ./calc input -./regression.at:1221: $PREPARSER ./expect2 -stderr: -syntax error, unexpected '+', expecting A or B +./calc.at:1491: $PREPARSER ./calc input stderr: -./regression.at:1221: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +647. regression.at:1355: testing Token number in precedence declaration ... Starting parse Entering state 0 Reading a token @@ -247794,10 +247755,13 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -642. regression.at:1221: ok +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wall -o input.c input.y +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +648. regression.at:1408: testing parse-gram.y: LALR = IELR ... +./regression.at:1414: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=lalr input.y stderr: +649. regression.at:1430: testing parse.error=verbose and YYSTACK_USE_ALLOCA ... 
+./regression.at:1481: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y Starting parse Entering state 0 Reading a token @@ -247875,22 +247839,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -input.y: In function 'yyparse': -input.y:59:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] - 59 | fprintf (stderr, " yymsg_alloc = %d\n", yymsg_alloc); - | ^~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~ - | | - | long int -input.y:60:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] - 60 | fprintf (stderr, " YYSTACK_ALLOC_MAXIMUM = %d\n", - | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -input.y:62:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] - 62 | fprintf (stderr, " YYSIZE_MAXIMUM = %d\n", YYSIZE_MAXIMUM); - | ^~~~~~~~~~~~~~~~~~~~~~~~~ -stdout: -./regression.at:1613: $PREPARSER ./input -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -247900,21 +247849,14 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stderr: -syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B or 123456789112345678921234567893123456789412345678951234567896123C -syntax error, unexpected 'd' -syntax error -memory exhausted -./regression.at:1613: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: cat stderr - -650. regression.at:1504: ok +./calc.at:1491: cat stderr input: | (# + 1) = 1111 -./calc.at:1494: $PREPARSER ./calc input +650. regression.at:1504: testing parse.error=verbose overflow ... +./regression.at:1604: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1491: $PREPARSER ./calc input +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Werror stderr: -654. regression.at:1875: testing Lex and parse params: glr.c ... -./regression.at:1875: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y Starting parse Entering state 0 Reading a token @@ -247984,10 +247926,8 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -655. regression.at:1876: testing Lex and parse params: lalr1.cc ... 
+./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1482: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: Starting parse Entering state 0 @@ -248058,15 +247998,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stdout: -stderr: -./regression.at:1876: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./regression.at:1394: $PREPARSER ./input -stdout: -./regression.at:1874: $PREPARSER ./input -stderr: -stderr: -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -248076,23 +248008,18 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -stdout: -./existing.at:1460: $PREPARSER ./input -./regression.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./calc.at:1491: cat stderr +./regression.at:1611: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -./calc.at:1494: cat stderr -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -647. regression.at:1355: ok -623. existing.at:1460: ok -./regression.at:1874: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +input.y:24.5-19: error: rule useless in parser due to conflicts [-Werror=other] +input.y:28.5-19: error: rule useless in parser due to conflicts [-Werror=other] +input.y:18.1-5: error: useless precedence and associativity for TK1 [-Werror=precedence] input: | (1 + # + 1) = 1111 -./regression.at:1875: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./calc.at:1494: $PREPARSER ./calc input -653. regression.at:1874: ok +./calc.at:1491: $PREPARSER ./calc input stderr: - +./regression.at:1388: sed 's,.*/$,,' stderr 1>&2 Starting parse Entering state 0 Reading a token @@ -248176,12 +248103,12 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - -./regression.at:1876: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -656. regression.at:1877: testing Lex and parse params: glr.cc ... 
-./regression.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: +stdout: +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=error +./regression.at:1263: $PREPARSER ./input --debug Starting parse Entering state 0 Reading a token @@ -248265,8 +248192,24 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) - -./calc.at:1494: "$PERL" -pi -e 'use strict; +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reducing stack by rule 1 (line 20): +-> $$ = nterm start () +Entering state 1 +Stack now 0 1 +Reading a token +Next token is token 'a' (PRINTER) +syntax error, unexpected 'a', expecting end of file +Error: popping nterm start () +Stack now 0 +Cleanup: discarding lookahead token 'a' (PRINTER) +DESTRUCTOR +Stack now 0 +./regression.at:1263: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -248276,16 +248219,13 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./calc.at:1494: cat stderr -./regression.at:1877: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./regression.at:1420: diff lalr.c ielr.c -648. regression.at:1408: ok +./calc.at:1491: cat stderr +644. regression.at:1230: ok input: | (1 + 1) / (1 - 1) -./calc.at:1494: $PREPARSER ./calc input -658. regression.at:1889: testing stdio.h is not needed ... -./regression.at:1906: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./calc.at:1491: $PREPARSER ./calc input stderr: +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y -Wnone,none -Werror --trace=none Starting parse Entering state 0 Reading a token @@ -248401,16 +248341,8 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -stderr: -657. regression.at:1878: testing Lex and parse params: glr2.cc ... -./regression.at:1878: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./calc.at:1494: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -659. push.at:25: testing Memory Leak for Early Deletion ... 
-./regression.at:1713: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -./push.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -stderr: +./calc.at:1491: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -248527,9 +248459,7 @@ Entering state 16 Cleanup: popping token end of input (2.1: ) Cleanup: popping nterm input (1.1-2.0: ) -./regression.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./regression.at:1713: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./calc.at:1494: "$PERL" -pi -e 'use strict; +./calc.at:1491: "$PERL" -pi -e 'use strict; s{syntax error on token \[(.*?)\] \(expected: (.*)\)} { my $unexp = $1; @@ -248539,85 +248469,202 @@ : "syntax error, unexpected $unexp"; }eg ' expout || exit 77 -./regression.at:1713: grep 'syntax error,' stderr.txt -./regression.at:1713: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -./calc.at:1494: cat stderr -./push.at:75: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -570. calc.at:1494: ok -./regression.at:1713: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +stderr: +stdout: +./existing.at:1460: $PREPARSER ./input +./calc.at:1491: cat stderr +stderr: +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +566. calc.at:1491: ok +./regression.at:1388: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall -o input.c input.y --warnings=none -Werror --trace=none +623. existing.at:1460: ok +651. regression.at:1628: testing LAC: Exploratory stack ... +./regression.at:1713: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ + -Dparse.lac.es-capacity-initial=1 \ + -Dparse.lac.memory-trace=full -o input.c input.y + + +./regression.at:1417: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c -Dlr.type=ielr input.y +stderr: +stdout: +./regression.at:1393: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1713: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1144: $PREPARSER ./dancer +stderr: +syntax error, unexpected ':' +./regression.at:1144: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +input.y: In function 'yyparse': +input.y:59:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] + 59 | fprintf (stderr, " yymsg_alloc = %d\n", yymsg_alloc); + | ^~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~ + | | + | long int +input.y:60:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] + 60 | fprintf (stderr, " YYSTACK_ALLOC_MAXIMUM = %d\n", + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +input.y:62:24: warning: format '%d' expects argument of type 'int', but argument 3 has type 'long int' [-Wformat=] + 62 | fprintf (stderr, " YYSIZE_MAXIMUM = %d\n", YYSIZE_MAXIMUM); + | ^~~~~~~~~~~~~~~~~~~~~~~~~ +stdout: +653. regression.at:1874: testing Lex and parse params: yacc.c ... +./regression.at:1874: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +639. 
regression.at:1144: ok +./regression.at:1613: $PREPARSER ./input +652. regression.at:1739: testing LAC: Memory exhaustion ... +./regression.at:1771: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y +stderr: +syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B or 123456789112345678921234567893123456789412345678951234567896123C +syntax error, unexpected 'd' +syntax error +memory exhausted +./regression.at:1613: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +650. regression.at:1504: ok +stdout: + +./regression.at:1483: $PREPARSER ./input +stderr: +syntax error, unexpected 'a', expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B +syntax error, unexpected end of file, expecting 123456789112345678921234567893123456789412345678951234567896123A or 123456789112345678921234567893123456789412345678951234567896123B +./regression.at:1483: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./regression.at:1874: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror +649. regression.at:1430: ok + +./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +654. regression.at:1875: testing Lex and parse params: glr.c ... +./regression.at:1875: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +655. regression.at:1876: testing Lex and parse params: lalr1.cc ... +./regression.at:1876: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./regression.at:1876: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./regression.at:1875: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +656. regression.at:1877: testing Lex and parse params: glr.cc ... +./regression.at:1877: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./regression.at:1877: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./regression.at:1221: $PREPARSER ./expect2 +stderr: +syntax error, unexpected '+', expecting A or B +./regression.at:1221: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +642. regression.at:1221: ok + +stderr: +stdout: +stderr: +./regression.at:1394: $PREPARSER ./input +stderr: +stdout: +./regression.at:1394: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1874: $PREPARSER ./input +stderr: +647. regression.at:1355: ok +./regression.at:1874: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +653. regression.at:1874: ok + +./regression.at:1420: diff lalr.c ielr.c + +648. regression.at:1408: ok +657. regression.at:1878: testing Lex and parse params: glr2.cc ... 
+./regression.at:1878: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y + ./regression.at:1878: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +658. regression.at:1889: testing stdio.h is not needed ... +./regression.at:1906: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stdout: +./regression.at:1713: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +659. push.at:25: testing Memory Leak for Early Deletion ... +./push.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +stderr: +./regression.at:1713: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1713: grep 'syntax error,' stderr.txt 660. push.at:84: testing Multiple impure instances ... ./push.at:134: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y - +./regression.at:1713: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1906: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1713: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt ./regression.at:1713: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -stderr: -stdout: -./regression.at:1222: $PREPARSER ./expect2 +./push.at:75: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./regression.at:1714: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ -Dparse.lac.es-capacity-initial=1 \ -Dparse.lac.memory-trace=full -o input.c input.y +./push.at:134: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -syntax error, unexpected '+', expecting A or B +stdout: +./regression.at:1714: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: +stdout: +658. regression.at:1889: ok +./existing.at:74: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none + +stderr: +stdout: +./push.at:134: $PREPARSER ./input +stderr: +stderr: +./push.at:134: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./regression.at:1222: $PREPARSER ./expect2 stderr: +661. push.at:145: testing Unsupported Skeletons ... +syntax error, unexpected '+', expecting A or B ./regression.at:1222: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./push.at:135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y +./push.at:156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y +643. 
regression.at:1222: ok +stderr: +stdout: +./push.at:76: $PREPARSER ./input +stderr: +stderr: +stderr: stdout: +stdout: +./push.at:76: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1875: $PREPARSER ./input +./push.at:135: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ./regression.at:1145: $PREPARSER ./dancer -./push.at:134: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: + +stderr: +659. push.at:25: ok syntax error, unexpected ':' ./regression.at:1145: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -643. regression.at:1222: ok +./regression.at:1875: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +654. regression.at:1875: ok 640. regression.at:1145: ok -stderr: -stderr: -stdout: -stdout: -661. push.at:145: testing Unsupported Skeletons ... -./push.at:156: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret input.y -./regression.at:1771: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -658. regression.at:1889: ok -./regression.at:1714: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +661. push.at:145: ok + 662. push.at:167: testing Pstate reuse ... ./push.at:276: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -661. push.at:145: ok -stderr: -stdout: -./push.at:76: $PREPARSER ./input -stderr: 663. c++.at:26: testing C++ Locations Unit Tests ... - -./push.at:76: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y 664. c++.at:107: testing C++ Variant-based Symbols Unit Tests ... ./c++.at:234: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.yy -659. push.at:25: ok -./push.at:276: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS - -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -./existing.at:74: sed -n 's/^State //p' input.output | tail -1 665. c++.at:247: testing Multiple occurrences of $n and api.value.automove ... -stderr: ./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret input.yy -stdout: -./push.at:134: $PREPARSER ./input -stderr: -./push.at:134: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./push.at:135: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -======== Testing with C++ standard flags: '' 666. c++.at:566: testing Variants lalr1.cc ... 
-./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./push.at:276: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS ======== Testing with C++ standard flags: '' ./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Werror -./push.at:135: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./push.at:135: $PREPARSER ./input ./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: +stderr: input.yy:16.33-34: error: multiple occurrences of $2 with api.value.automove [-Werror=other] 16 | | "twice" exp { $$ = $2 + $2; } | ^~ @@ -248627,27 +248674,13 @@ input.yy:17.40-41: error: multiple occurrences of $2 with api.value.automove [-Werror=other] 17 | | "thrice" exp[val] { $$ = $2 + $val + $2; } | ^~ -./c++.at:263: sed 's,.*/$,,' stderr 1>&2 -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=error -stderr: -stdout: -./push.at:277: ./input -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Wnone,none -Werror --trace=none -662. push.at:167: ok - -./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=none -Werror --trace=none -stderr: -stdout: -./push.at:135: $PREPARSER ./input -stderr: ./push.at:135: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: 660. push.at:84: ok +stderr: stdout: +./c++.at:263: sed 's,.*/$,,' stderr 1>&2 ./regression.at:1772: $PREPARSER ./input --debug -667. c++.at:567: testing Variants lalr1.cc parse.assert ... stderr: - Starting parse Entering state 0 Stack now 0 @@ -248658,94 +248691,85 @@ memory exhausted Cleanup: discarding lookahead token "end of file" () Stack now 0 -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./regression.at:1772: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -665. 
c++.at:247: ok -./regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y -stderr: +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=error -stdout: -./regression.at:1875: $PREPARSER ./input -stderr: -668. c++.at:568: testing Variants lalr1.cc parse.assert api.value.automove ... -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -Dparse.lac.es-capacity-initial=1 -o input.c input.y stderr: -./regression.at:1875: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./regression.at:1714: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -654. regression.at:1875: ok -stderr: ./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: ./regression.at:1714: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy -Wnone,none -Werror --trace=none ./regression.at:1714: grep 'syntax error,' stderr.txt -669. c++.at:569: testing Variants lalr1.cc parse.assert %locations ... -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./regression.at:1714: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt ./regression.at:1714: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt - -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +667. c++.at:567: testing Variants lalr1.cc parse.assert ... +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./regression.at:1714: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:263: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret input.yy --warnings=none -Werror --trace=none ./regression.at:1715: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ -Dparse.lac.es-capacity-initial=1 \ -Dparse.lac.memory-trace=full -o input.c input.y -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +665. c++.at:247: ok + ./regression.at:1715: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -670. c++.at:570: testing Variants lalr1.cc parse.assert %code {\n#define TWO_STAGE_BUILD\n} ... 
-======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./existing.at:74: $PREPARSER ./input -stderr: -./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -618. existing.at:74: ok +./push.at:277: ./input +662. push.at:167: ok +668. c++.at:568: testing Variants lalr1.cc parse.assert api.value.automove ... +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +669. c++.at:569: testing Variants lalr1.cc parse.assert %locations ... +======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./existing.at:74: sed -n 's/^State //p' input.output | tail -1 stderr: +./existing.at:74: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stdout: -./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: +./regression.at:1787: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stdout: ./regression.at:1876: $PREPARSER ./input stderr: -671. c++.at:571: testing Variants lalr1.cc parse.assert api.token.constructor ... ./regression.at:1876: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y 655. regression.at:1876: ok -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +670. c++.at:570: testing Variants lalr1.cc parse.assert %code {\n#define TWO_STAGE_BUILD\n} ... +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: ./regression.at:1715: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt stderr: ./regression.at:1715: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./regression.at:1715: grep 'syntax error,' stderr.txt -672. c++.at:572: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} ... 
./regression.at:1715: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt ./regression.at:1715: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt ./regression.at:1715: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -stderr: -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stdout: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./regression.at:1716: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full \ -Dparse.lac.es-capacity-initial=1 \ -Dparse.lac.memory-trace=full -o input.c input.y +stderr: +stdout: ./regression.at:1877: $PREPARSER ./input stderr: ./regression.at:1877: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 656. regression.at:1877: ok - ./regression.at:1716: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -673. c++.at:573: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations ... + +671. c++.at:571: testing Variants lalr1.cc parse.assert api.token.constructor ... ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./regression.at:1788: $PREPARSER ./input --debug @@ -248766,98 +248790,41 @@ ./regression.at:1788: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 652. regression.at:1739: ok -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -674. c++.at:574: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations api.value.automove ... 
-======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./regression.at:1716: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt -stderr: -./regression.at:1716: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1716: grep 'syntax error,' stderr.txt -./regression.at:1716: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt -./regression.at:1716: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -./regression.at:1716: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt -./regression.at:1719: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.cc input.y -./regression.at:1719: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror -stderr: -stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +672. c++.at:572: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} ... ======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./torture.at:395: $PREPARSER ./input +./existing.at:74: $PREPARSER ./input stderr: -./torture.at:395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:74: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +618. existing.at:74: ok stderr: stdout: -613. torture.at:385: ok ./c++.at:92: $PREPARSER ./input + stderr: ./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y - stderr: stdout: -./regression.at:1878: $PREPARSER ./input -stderr: -./regression.at:1878: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1716: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt ./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -657. regression.at:1878: ok -675. c++.at:584: testing Variants and Typed Midrule Actions ... -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y - -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -676. c++.at:794: testing Doxygen Public Documentation ... 
-./c++.at:794: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:794: doxygen --version || exit 77 -stderr: ---- /dev/null 2024-05-20 04:47:26.000000000 -1200 -+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/stderr 2024-05-21 12:00:34.329227659 -1200 -@@ -0,0 +1 @@ -+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/test-source: line 180: doxygen: command not found -stdout: -stdout: -676. c++.at:794: ./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS - skipped (c++.at:794) - -stderr: -677. c++.at:795: testing Doxygen Private Documentation ... -./c++.at:795: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:795: doxygen --version || exit 77 -stderr: -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ---- /dev/null 2024-05-20 04:47:26.000000000 -1200 -+++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/stderr 2024-05-21 12:00:34.773227659 -1200 -@@ -0,0 +1 @@ -+/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/test-source: line 180: doxygen: command not found -stdout: -677. c++.at:795: skipped (c++.at:795) - -stderr: -678. c++.at:848: testing Relative namespace references ... -./c++.at:849: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -stdout: +673. c++.at:573: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations ... stderr: -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./regression.at:1716: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./regression.at:1716: grep 'syntax error,' stderr.txt +./regression.at:1716: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1716: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +./regression.at:1716: "$PERL" -0777 -ne 'print s/\(realloc//g;' < stderr.txt +./regression.at:1719: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.cc input.y +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./regression.at:1719: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Werror stderr: input.y:202.20: error: empty rule without %empty [-Werror=empty-rule] input.y:270.7: error: empty rule without %empty [-Werror=empty-rule] @@ -248950,17 +248917,57 @@ ./existing.at:1460: sed 's,.*/$,,' stderr 1>&2 ./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; 
export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error stderr: +stdout: +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +stderr: +./c++.at:92: $PREPARSER ./input +stdout: +stderr: +./torture.at:395: $PREPARSER ./input +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./torture.at:395: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +613. torture.at:385: ok + +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +674. c++.at:574: testing Variants lalr1.cc parse.assert api.token.constructor api.token.prefix={TOK_} %locations api.value.automove ... +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./regression.at:1878: $PREPARSER ./input +stderr: +./regression.at:1878: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +657. regression.at:1878: ok + +stderr: +stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +675. c++.at:584: testing Variants and Typed Midrule Actions ... +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: stdout: ./c++.at:566: $here/modern -./c++.at:568: $here/modern stdout: Modern C++: 201703 -stdout: ./c++.at:566: $PREPARSER ./list -Modern C++: 201703 -./c++.at:568: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -248983,6 +248990,42 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./regression.at:1719: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +stderr: +./regression.at:1719: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./regression.at:1719: grep 'syntax error,' stderr.txt +./regression.at:1719: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +./regression.at:1719: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt +stderr: +stdout: +./regression.at:1727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.java input.y +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +651. 
regression.at:1628: skipped (regression.at:1727) + +676. c++.at:794: testing Doxygen Public Documentation ... +./c++.at:794: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +stderr: +stdout: +./c++.at:794: doxygen --version || exit 77 +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +--- /dev/null 2023-04-20 00:26:40.000000000 +1400 ++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/stderr 2023-04-20 07:49:07.549716587 +1400 +@@ -0,0 +1 @@ ++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/676/test-source: line 180: doxygen: command not found +stdout: +stdout: +./c++.at:568: $here/modern +676. c++.at:794: stdout: +Modern C++: 201703 +./c++.at:568: $PREPARSER ./list + skipped (c++.at:794) stderr: Destroy: "" Destroy: "" @@ -249005,30 +249048,44 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y + ======== Testing with C++ standard flags: '' ./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +677. c++.at:795: testing Doxygen Private Documentation ... +./c++.at:795: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stdout: -./regression.at:1719: $PREPARSER ./input --debug > stdout.txt 2> stderr.txt +./c++.at:235: $PREPARSER ./list stderr: stderr: -./regression.at:1719: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./c++.at:570: $here/modern +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:795: doxygen --version || exit 77 +--- /dev/null 2023-04-20 00:26:40.000000000 +1400 ++++ /build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/stderr 2023-04-20 07:49:08.157693143 +1400 +@@ -0,0 +1 @@ ++/build/bison-3.8.2+dfsg/tests/testsuite.dir/at-groups/677/test-source: line 180: doxygen: command not found +stdout: +677. c++.at:795: skipped (c++.at:795) + +stderr: +stdout: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +678. c++.at:848: testing Relative namespace references ... 
+./c++.at:849: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: -./regression.at:1719: grep 'syntax error,' stderr.txt stdout: +./c++.at:567: $here/modern stdout: Modern C++: 201703 -./c++.at:570: $PREPARSER ./list -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./regression.at:1719: "$PERL" -0777 -ne 'print s/inconsistent default reduction//g;' stdout.txt +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:567: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249051,17 +249108,24 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./regression.at:1719: "$PERL" -0777 -ne 'print s/\bconsistent default reduction//g;' stdout.txt -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./regression.at:1727: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dparse.lac=full -o input.java input.y +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:567: $here/modern +======== Testing with C++ standard flags: '' +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:92: $PREPARSER ./input +stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:569: $here/modern stdout: Modern C++: 201703 -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $PREPARSER ./list +./c++.at:569: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249084,29 +249148,51 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -651. 
regression.at:1628: skipped (regression.at:1727) +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y - stdout: -./c++.at:92: $PREPARSER ./input +./c++.at:570: $here/modern +stdout: +Modern C++: 201703 +./c++.at:570: $PREPARSER ./list stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stdout: -./c++.at:569: $here/modern +./c++.at:572: $here/modern stdout: Modern C++: 201703 -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:569: $PREPARSER ./list +./c++.at:572: $PREPARSER ./list +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: Destroy: "0" Destroy: "0" @@ -249129,18 +249215,11 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -679. c++.at:854: testing Absolute namespace references ... 
-./c++.at:855: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stdout: ./c++.at:571: $here/modern stdout: @@ -249169,19 +249248,11 @@ Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) ./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: ./c++.at:659: $PREPARSER ./input stderr: Starting parse @@ -249234,6 +249305,10 @@ ./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +stderr: +stdout: ./c++.at:573: $here/modern stdout: Modern C++: 201703 @@ -249265,40 +249340,34 @@ ./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:572: $here/modern -stdout: -Modern C++: 201703 -./c++.at:572: $PREPARSER ./list +./c++.at:849: $PREPARSER ./input stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list 
list.cc $LIBS stderr: stdout: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:92: $PREPARSER ./input +stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: ./c++.at:574: $here/modern stdout: +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Modern C++: 201703 ./c++.at:574: $PREPARSER ./list stderr: @@ -249326,85 +249395,17 @@ ./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:568: $here/modern -stdout: -Legac++ -./c++.at:568: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: ./c++.at:566: $here/modern -stdout: -stderr: +./c++.at:574: $CXX $CPPFLAGS 
$CXXFLAGS $LDFLAGS -o list list.cc $LIBS stdout: Legac++ ./c++.at:566: $PREPARSER ./list stderr: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS Destroy: "0" Destroy: "0" Destroy: 1 @@ -249431,29 +249432,11 @@ ./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -stderr: -stdout: -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none -stderr: stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: -stdout: -./c++.at:571: $here/modern +./c++.at:568: $here/modern stdout: Legac++ -./c++.at:571: $PREPARSER ./list +./c++.at:568: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249476,46 +249459,24 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:569: $here/modern -stdout: -Legac++ -./c++.at:569: $PREPARSER ./list +./c++.at:849: $PREPARSER ./input stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:659: $PREPARSER ./input stderr: -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y Starting parse Entering state 0 Stack now 0 
@@ -249561,144 +249522,24 @@ Cleanup: popping nterm expr (40) destroy: 40 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y ./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] -input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] -input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] -input.y: error: 1876 shift/reduce conflicts [-Werror=conflicts-sr] -input.y: error: 144 reduce/reduce conflicts [-Werror=conflicts-rr] -input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] -input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] -input.y:54.1-5: error: useless associativity for HORELSE, use %precedence [-Werror=precedence] -input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] -input.y:61.1-5: error: useless 
associativity for HNOT, use %precedence [-Werror=precedence] -input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] -input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] -stderr: -stdout: -./existing.at:808: sed 's,.*/$,,' stderr 1>&2 -./c++.at:570: $here/modern -stdout: -Legac++ -./c++.at:570: $PREPARSER ./list -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stdout: -./c++.at:567: $here/modern -stdout: -Legac++ -./c++.at:567: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' stderr: -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stdout: +stdout: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:573: $here/modern +./c++.at:567: $here/modern stdout: Legac++ -./c++.at:573: $PREPARSER ./list +./c++.at:567: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249721,22 +249562,16 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -stderr: +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stdout: -./c++.at:235: $PREPARSER ./list -./c++.at:574: $here/modern +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -======== 
Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: $here/modern +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS Legac++ -./c++.at:574: $PREPARSER ./list +./c++.at:569: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249759,19 +249594,26 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stderr: stderr: stdout: +stdout: +stdout: +./c++.at:570: $here/modern ./c++.at:572: $here/modern -stderr: stdout: +stdout: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +Legac++ +./c++.at:570: $PREPARSER ./list Legac++ ./c++.at:572: $PREPARSER ./list -stdout: +stderr: stderr: Destroy: "0" Destroy: "0" @@ -249794,28 +249636,6 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:849: $PREPARSER ./input -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:568: $here/modern -stdout: -Legac++ -./c++.at:568: $PREPARSER ./list -stderr: Destroy: "0" Destroy: "0" Destroy: 1 @@ -249837,24 +249657,35 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:92: $PREPARSER ./input stderr: ./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list 
list.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +./c++.at:849: $PREPARSER ./input +stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:566: $here/modern +./c++.at:571: $here/modern stdout: Legac++ -./c++.at:566: $PREPARSER ./list +./c++.at:571: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -249877,10 +249708,66 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +input.y:128.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:137.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:142.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:161.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:179.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:205.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:213.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:225.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:292.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:294.20: error: empty rule without %empty [-Werror=empty-rule] +input.y:367.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:373.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:387.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:401.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:413.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:443.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:471.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:474.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:489.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:506.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:587.18: error: empty rule without %empty [-Werror=empty-rule] +input.y:591.18: error: empty rule without %empty [-Werror=empty-rule] +input.y: error: 1876 shift/reduce conflicts [-Werror=conflicts-sr] +input.y: error: 144 reduce/reduce conflicts [-Werror=conflicts-rr] +input.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +input.y:72.1-5: error: useless associativity for HQUA, use %precedence [-Werror=precedence] +input.y:53.1-6: error: useless associativity for HASSIGN, use %precedence [-Werror=precedence] +input.y:54.1-5: error: useless associativity for HORELSE, 
use %precedence [-Werror=precedence] +input.y:55.1-5: error: useless associativity for HANDTHEN, use %precedence [-Werror=precedence] +input.y:61.1-5: error: useless associativity for HNOT, use %precedence [-Werror=precedence] +input.y:68.1-5: error: useless associativity for UNEAR, use %precedence [-Werror=precedence] +input.y: error: fix-its can be applied. Rerun with option '--update'. [-Werror=other] +./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./existing.at:808: sed 's,.*/$,,' stderr 1>&2 +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=error +stderr: +stdout: +./c++.at:235: $PREPARSER ./list +stderr: +stderr: +stdout: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:849: $PREPARSER ./input +stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:659: $PREPARSER ./input @@ -249935,37 +249822,17 @@ ./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:566: $here/modern stderr: stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: stdout: -./c++.at:849: $PREPARSER ./input -stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: +Legac++ +./c++.at:566: $PREPARSER ./list +./c++.at:573: $here/modern stdout: -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -stdout: -./c++.at:571: $here/modern -stdout: Legac++ -./c++.at:571: $PREPARSER ./list -stderr: +./c++.at:573: $PREPARSER ./list Destroy: "0" Destroy: "0" Destroy: 1 @@ -249987,23 +249854,8 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:235: $PREPARSER ./list -stderr: -stderr: -stdout: -./c++.at:235: sed >&2 -e 
'/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:570: $here/modern -======== Testing with C++ standard flags: '' -stdout: -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -Legac++ -./c++.at:570: $PREPARSER ./list stderr: +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Destroy: "0" Destroy: "0" Destroy: 1 @@ -250025,18 +249877,25 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./existing.at:1460: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +======== Testing with C++ standard flags: '' +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:567: $here/modern +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:568: $here/modern stdout: Legac++ -./c++.at:567: $PREPARSER ./list +./c++.at:568: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250059,15 +249918,19 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:569: $here/modern +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:574: $here/modern stdout: -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y Legac++ -./c++.at:569: $PREPARSER ./list +./c++.at:574: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250090,14 +249953,13 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:92: $PREPARSER ./input @@ -250108,13 +249970,8 @@ ./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stderr: -stdout: -stdout: ./c++.at:659: $PREPARSER ./input -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: stderr: Starting parse Entering state 0 @@ -250161,26 +250018,27 @@ Cleanup: popping nterm expr (40) destroy: 40 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: -stderr: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: stdout: -./c++.at:855: $PREPARSER ./input +./c++.at:849: $PREPARSER ./input stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stdout: -./c++.at:572: $here/modern +./c++.at:567: $here/modern stdout: Legac++ -./c++.at:572: $PREPARSER ./list +./c++.at:567: $PREPARSER ./list +stderr: +stdout: stderr: +./c++.at:569: $here/modern Destroy: "0" Destroy: "0" Destroy: 1 @@ -250202,13 +250060,11 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -stderr: -stdout: -./c++.at:573: $here/modern stdout: +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Legac++ -./c++.at:573: $PREPARSER ./list -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: $PREPARSER ./list +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Destroy: "0" Destroy: "0" @@ -250231,26 +250087,19 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:849: $PREPARSER ./input -stderr: 
-./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:566: $here/modern +./c++.at:570: $here/modern stdout: -Modern C++: 201103 -./c++.at:566: $PREPARSER ./list +Legac++ +./c++.at:570: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250273,18 +250122,29 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./c++.at:574: $here/modern stdout: +./c++.at:571: $here/modern stdout: Legac++ -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:574: $PREPARSER ./list +./c++.at:571: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250307,60 +250167,84 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:568: $here/modern +./c++.at:572: $here/modern stdout: -Modern C++: 201103 -./c++.at:568: $PREPARSER ./list +Legac++ +./c++.at:572: $PREPARSER ./list stderr: -Destroy: "" -Destroy: "" +Destroy: "0" +Destroy: "0" Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) Destroy: "" Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: (0, 1, 2, 4) 
+Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:855: $PREPARSER ./input +./c++.at:849: $PREPARSER ./input stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:566: $here/modern +stdout: +Modern C++: 201103 +./c++.at:566: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:659: $PREPARSER ./input @@ -250412,37 +250296,35 @@ ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:92: $PREPARSER ./input -stderr: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -======== Testing with C++ standard flags: '' -./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:573: 
$CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS stderr: -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:849: $PREPARSER ./input +stdout: +./c++.at:92: $PREPARSER ./input stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:92: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:92: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:569: $here/modern +./c++.at:573: $here/modern stdout: -Modern C++: 201103 -./c++.at:569: $PREPARSER ./list +Legac++ +./c++.at:573: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250465,48 +250347,56 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:571: $here/modern +./c++.at:568: $here/modern +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stdout: Modern C++: 201103 -./c++.at:571: $PREPARSER ./list +./c++.at:568: $PREPARSER ./list stderr: -Destroy: "0" -Destroy: "0" +Destroy: "" +Destroy: "" Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () Destroy: "" Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:849: $PREPARSER ./input +stderr: +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $here/modern +./c++.at:849: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern 
modern.cc $LIBS stderr: stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:567: $here/modern stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS Modern C++: 201103 ./c++.at:567: $PREPARSER ./list stderr: @@ -250534,19 +250424,26 @@ ./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./existing.at:1460: sed -n 's/^State //p' input.output | tail -1 -./existing.at:1460: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:570: $here/modern +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:569: $here/modern stdout: Modern C++: 201103 -./c++.at:570: $PREPARSER ./list +./c++.at:569: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250569,28 +250466,16 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:855: $PREPARSER ./input -stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:572: $here/modern +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:574: $here/modern stdout: -Modern C++: 201103 -./c++.at:572: $PREPARSER ./list +Legac++ +./c++.at:574: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250613,29 +250498,28 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no 
-fno-caret -o list.cc list.y stderr: stdout: -./c++.at:849: $PREPARSER ./input +./existing.at:1460: $PREPARSER ./input stderr: -./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:850: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: +624. existing.at:1460: ok stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS + stderr: stdout: -./c++.at:566: $here/modern +./c++.at:570: $here/modern +679. c++.at:854: testing Absolute namespace references ... +./c++.at:855: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stdout: -Modern C++: 201402 -./c++.at:566: $PREPARSER ./list +Modern C++: 201103 +./c++.at:570: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250658,19 +250542,18 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stdout: +stderr: stdout: -./c++.at:574: $here/modern ./c++.at:659: $PREPARSER ./input -stdout: +======== Testing with C++ standard flags: '' +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -Modern C++: 201103 -./c++.at:574: $PREPARSER ./list +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS Starting parse Entering state 0 Stack now 0 @@ -250716,42 +250599,49 @@ Cleanup: popping nterm expr (40) destroy: 40 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -Destroy: "" -Destroy: "" +stdout: +./c++.at:571: $here/modern +stdout: +Modern C++: 201103 +./c++.at:571: $PREPARSER ./list +stderr: +Destroy: "0" +Destroy: "0" Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) Destroy: "" Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () 
+Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:573: $here/modern +./c++.at:566: $here/modern stdout: -Modern C++: 201103 -./c++.at:573: $PREPARSER ./list +Modern C++: 201402 +./c++.at:566: $PREPARSER ./list stderr: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS Destroy: "0" Destroy: "0" Destroy: 1 @@ -250773,110 +250663,88 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:92: $PREPARSER ./input -stderr: -./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -663. c++.at:26: ok - -680. c++.at:863: testing Syntactically invalid namespace references ... 
-./c++.at:864: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:865: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:568: $here/modern +./c++.at:572: $here/modern stdout: -Modern C++: 201402 -./c++.at:568: $PREPARSER ./list +Modern C++: 201103 +./c++.at:572: $PREPARSER ./list stderr: -Destroy: "" -Destroy: "" +stderr: +Destroy: "0" +Destroy: "0" Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) Destroy: "" Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:868: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -stderr: stdout: -./c++.at:235: $PREPARSER ./list +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:92: $PREPARSER ./input stderr: +./c++.at:92: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:869: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:870: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -680. c++.at:863: ok +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +663. c++.at:26: ok +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +680. c++.at:863: testing Syntactically invalid namespace references ... +./c++.at:864: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: +./c++.at:865: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stdout: -./existing.at:1460: $PREPARSER ./input -stderr: -stderr: -stdout: -./existing.at:1460: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -624. existing.at:1460: ok -681. c++.at:884: testing Syntax error discarding no lookahead ... 
-======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y - -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:868: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: stdout: -./c++.at:855: $PREPARSER ./input +./c++.at:849: $PREPARSER ./input stderr: -./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -682. c++.at:1064: testing Syntax error as exception: lalr1.cc ... +./c++.at:849: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:850: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:869: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: stdout: -./c++.at:1064: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:870: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +680. c++.at:863: ok + +681. c++.at:884: testing Syntax error discarding no lookahead ... 
======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./c++.at:850: $PREPARSER ./input +./c++.at:235: $PREPARSER ./list stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:571: $here/modern +./c++.at:573: $here/modern stdout: -Modern C++: 201402 -./c++.at:571: $PREPARSER ./list +Modern C++: 201103 +./c++.at:573: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -250899,68 +250767,71 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y -Wnone,none -Werror --trace=none ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:567: $here/modern +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:568: $here/modern stdout: Modern C++: 201402 -./c++.at:567: $PREPARSER ./list +./c++.at:568: $PREPARSER ./list stderr: -Destroy: "0" -Destroy: "0" +Destroy: "" +Destroy: "" Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () Destroy: "" Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:568: COLUMNS=1000; export 
COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:850: $PREPARSER ./input stderr: +stderr: +stdout: ./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stderr: stdout: -stdout: -./c++.at:569: $here/modern ./c++.at:659: $PREPARSER ./input stderr: -stdout: -stdout: -Modern C++: 201402 -./c++.at:569: $PREPARSER ./list stderr: -./c++.at:941: $PREPARSER ./input Starting parse Entering state 0 Stack now 0 @@ -251006,8 +250877,39 @@ Cleanup: popping nterm expr (40) destroy: 40 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +======== Testing with C++ standard flags: '' +./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:569: $here/modern +stdout: +stderr: +Modern C++: 201402 +./c++.at:569: $PREPARSER ./list +stdout: +./c++.at:941: $PREPARSER ./input stderr: stderr: +syntax error +Discarding 'a'. +Reducing 'a'. +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Destroy: "0" Destroy: "0" Destroy: 1 @@ -251029,29 +250931,51 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -syntax error -Discarding 'a'. -Reducing 'a'. 
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:659: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ======== Testing with C++ standard flags: '' ./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:659: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:574: $here/modern +stdout: +Modern C++: 201103 +./c++.at:574: $PREPARSER ./list ./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stderr: stdout: -./c++.at:572: $here/modern +./c++.at:567: $here/modern stdout: Modern C++: 201402 -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:572: $PREPARSER ./list +./c++.at:567: $PREPARSER ./list +stderr: stderr: Destroy: "0" Destroy: "0" @@ -251074,15 +250998,13 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: stdout: ./c++.at:566: $here/modern +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: Modern C++: 201703 ./c++.at:566: $PREPARSER ./list +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: Destroy: "0" Destroy: "0" @@ -251106,33 +251028,17 @@ Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) ./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:856: $PREPARSER ./input -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o 
modern modern.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: ./c++.at:570: $here/modern stdout: Modern C++: 201402 ./c++.at:570: $PREPARSER ./list +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: Destroy: "0" Destroy: "0" @@ -251155,84 +251061,63 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -stderr: -stdout: ./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:235: $PREPARSER ./list +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:235: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:574: $here/modern +./c++.at:571: $here/modern stdout: Modern C++: 201402 -./c++.at:574: $PREPARSER ./list +./c++.at:571: $PREPARSER ./list stderr: -Destroy: "" -Destroy: "" +Destroy: "0" +Destroy: "0" Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) Destroy: "" Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -stderr: -stdout: -stderr: -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid 
character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:1064: $PREPARSER ./input < in -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:856: $PREPARSER ./input -stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:855: $PREPARSER ./input stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:941: $PREPARSER ./input @@ -251243,7 +251128,21 @@ ./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +stderr: +stdout: +./c++.at:235: $PREPARSER ./list +stderr: +./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +664. c++.at:107: ok + +stderr: +stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +682. c++.at:1064: testing Syntax error as exception: lalr1.cc ... 
+./c++.at:1064: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: ./c++.at:573: $here/modern @@ -251275,26 +251174,13 @@ ./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:567: $here/modern stderr: stdout: -Modern C++: 201703 +./c++.at:572: $here/modern stdout: -./c++.at:567: $PREPARSER ./list -./c++.at:568: $here/modern +Modern C++: 201402 +./c++.at:572: $PREPARSER ./list stderr: -stdout: -Modern C++: 201703 Destroy: "0" Destroy: "0" Destroy: 1 @@ -251316,41 +251202,21 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:568: $PREPARSER ./list -stderr: +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:850: $PREPARSER ./input stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stdout: ./c++.at:659: $PREPARSER ./input -======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: Starting parse Entering state 0 @@ -251398,30 +251264,86 @@ destroy: 40 ./c++.at:659: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 675. c++.at:584: ok -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS 683. c++.at:1065: testing Syntax error as exception: glr.cc ... 
-stderr: ./c++.at:1065: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: stdout: -./c++.at:856: $PREPARSER ./input +./c++.at:941: $PREPARSER ./input stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +Discarding 'a'. +Reducing 'a'. +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +./c++.at:568: $here/modern +stdout: +Modern C++: 201703 +./c++.at:568: $PREPARSER ./list +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:571: $here/modern +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:569: $here/modern stdout: Modern C++: 201703 -./c++.at:571: $PREPARSER ./list +./c++.at:569: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251444,10 +251366,16 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +stdout: +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern 
modern.cc $LIBS +stdout: ======== Testing with C++ standard flags: '' -./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: ./c++.at:941: $PREPARSER ./input @@ -251456,18 +251384,15 @@ Discarding 'a'. Reducing 'a'. ./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: ======== Testing with C++ standard flags: '' ./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stdout: -./c++.at:572: $here/modern +./c++.at:567: $here/modern stdout: Modern C++: 201703 -./c++.at:572: $PREPARSER ./list +./c++.at:567: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251490,53 +251415,18 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stderr: -stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: -stdout: -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:850: $PREPARSER ./input -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:235: $PREPARSER ./list -stderr: -./c++.at:235: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -664. c++.at:107: ok - -stderr: stdout: -./c++.at:569: $here/modern +./c++.at:566: $here/modern +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stdout: -Modern C++: 201703 -./c++.at:569: $PREPARSER ./list -684. c++.at:1066: testing Syntax error as exception: glr2.cc ... 
+Modern C++: 202002 +./c++.at:566: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251559,34 +251449,33 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:1066: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:856: $PREPARSER ./input +./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: stdout: -./c++.at:1066: ./check -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:1064: $PREPARSER ./input < in stderr: -stdout: ./c++.at:570: $here/modern stdout: +./c++.at:855: $PREPARSER ./input +./existing.at:808: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -Wall --report=all,no-cex --header -o input.c input.y --warnings=none -Werror --trace=none +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: Modern C++: 201703 ./c++.at:570: $PREPARSER ./list +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' stderr: +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Destroy: "0" Destroy: "0" Destroy: 1 @@ -251609,58 +251498,120 @@ Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) ./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: ======== Testing with C++ standard flags: '' ./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stdout: -./c++.at:941: $PREPARSER ./input +./c++.at:1064: $PREPARSER ./input < in +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -syntax error -Discarding 'a'. -Reducing 'a'. 
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:574: $here/modern stdout: +Modern C++: 201402 +./c++.at:574: $PREPARSER ./list +stderr: ======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: +./c++.at:571: $here/modern stdout: -./c++.at:850: $PREPARSER ./input +Modern C++: 201703 +./c++.at:571: $PREPARSER ./list stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) ./c++.at:1065: $PREPARSER ./input < in +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: error: invalid expression caught error error: invalid character caught error ./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:1065: $PREPARSER ./input < in stderr: error: invalid expression ./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./c++.at:1065: $PREPARSER ./input < in +stdout: +./c++.at:850: $PREPARSER ./input +stderr: stderr: error: invalid character +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' ./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:566: $here/modern +./c++.at:572: $here/modern stdout: -Modern C++: 202002 -./c++.at:566: 
$PREPARSER ./list +Modern C++: 201703 +./c++.at:572: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251683,24 +251634,23 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:566: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -stdout: -./c++.at:856: $PREPARSER ./input +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: +stdout: +./c++.at:855: $PREPARSER ./input ./c++.at:573: $here/modern +stderr: stdout: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Modern C++: 201703 ./c++.at:573: $PREPARSER ./list stderr: +======== Testing with C++ standard flags: '' Destroy: "0" Destroy: "0" Destroy: 1 @@ -251722,9 +251672,25 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:941: $PREPARSER ./input +stderr: +stderr: +stdout: +syntax error +Discarding 'a'. +Reducing 'a'. 
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:1064: $PREPARSER ./input < in @@ -251740,15 +251706,48 @@ ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1064: $PREPARSER ./input < in stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:574: $here/modern +./c++.at:850: $PREPARSER ./input stderr: +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -Modern C++: 201703 +======== Testing with C++ standard flags: '' +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:574: $PREPARSER ./list +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:568: $here/modern +stdout: +Modern C++: 202002 +./c++.at:568: $PREPARSER ./list +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: Destroy: "" Destroy: "" @@ -251771,98 +251770,117 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) +./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:855: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS 
$LDFLAGS -o modern modern.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:941: $PREPARSER ./input stderr: +stderr: syntax error Discarding 'a'. Reducing 'a'. +stdout: ./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -stderr: ./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:569: $here/modern stderr: stdout: -./c++.at:568: $here/modern stdout: +./c++.at:566: $here/modern Modern C++: 202002 -./c++.at:568: $PREPARSER ./list +./c++.at:569: $PREPARSER ./list +stdout: stderr: -Destroy: "" -Destroy: "" +Destroy: "0" +Destroy: "0" Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) Destroy: "" Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Modern C++: 202100 +./c++.at:566: $PREPARSER ./list +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +Destroy: "0" +Destroy: "0" +Destroy: 1 +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) +Destroy: "" +Destroy: 3 +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) +Destroy: 5 +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) +Destroy: (0, 1, 2, 4, 6) +./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:568: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stdout: -./c++.at:856: $PREPARSER ./input +./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +666. c++.at:566: ok + stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +684. c++.at:1066: testing Syntax error as exception: glr2.cc ... 
+./c++.at:1066: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy ======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: -error: invalid character stdout: -./c++.at:567: $here/modern -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:570: $here/modern stdout: Modern C++: 202002 -./c++.at:567: $PREPARSER ./list +./c++.at:570: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251885,68 +251903,31 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -======== Testing with C++ standard flags: '' -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./c++.at:1064: $PREPARSER ./input < in stderr: -stdout: -./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS error: invalid expression caught error error: invalid character caught error ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' +./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +stdout: ./c++.at:1064: $PREPARSER ./input < in +./c++.at:567: $here/modern stdout: -./c++.at:941: $PREPARSER ./input -stderr: +Modern C++: 202002 stderr: +./c++.at:567: $PREPARSER ./list error: invalid expression -syntax error -Discarding 'a'. -Reducing 'a'. 
-./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1064: $PREPARSER ./input < in -======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -error: invalid character ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:850: $PREPARSER ./input -stderr: -./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:851: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:570: $here/modern -stdout: -Modern C++: 202002 -./c++.at:570: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -251969,17 +251950,48 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:570: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +./c++.at:567: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:856: $PREPARSER ./input +./c++.at:574: $here/modern +stdout: +Modern C++: 201703 +./c++.at:574: $PREPARSER ./list +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: ./c++.at:1065: $PREPARSER ./input < in @@ -251994,6 +252006,7 @@ error: invalid expression ./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 
./c++.at:1065: $PREPARSER ./input < in +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: error: invalid character ./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -252001,10 +252014,17 @@ ./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:571: $here/modern +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:850: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:572: $here/modern stdout: Modern C++: 202002 -./c++.at:571: $PREPARSER ./list +./c++.at:572: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -252027,13 +252047,24 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' +./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stdout: -./c++.at:569: $here/modern +./c++.at:855: $PREPARSER ./input +stderr: +./c++.at:855: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:856: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +======== Testing with C++ standard flags: '' +stdout: +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:571: $here/modern stdout: Modern C++: 202002 -./c++.at:569: $PREPARSER ./list +./c++.at:571: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -252056,21 +252087,60 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:571: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -./existing.at:808: sed -n 's/^State //p' input.output | tail -1 -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:569: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y ./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +stdout: +./c++.at:941: $PREPARSER ./input +stderr: +syntax error +Discarding 'a'. +Reducing 'a'. 
+./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +stderr: +stdout: +./c++.at:850: $PREPARSER ./input +stderr: +./c++.at:850: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:851: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./existing.at:808: sed -n 's/^State //p' input.output | tail -1 +stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in ./existing.at:808: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS +stderr: +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:572: $here/modern +./c++.at:573: $here/modern +./c++.at:1064: $PREPARSER ./input < in stdout: +stderr: Modern C++: 202002 -./c++.at:572: $PREPARSER ./list +./c++.at:573: $PREPARSER ./list +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Destroy: "0" Destroy: "0" @@ -252093,46 +252163,31 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:572: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -stderr: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -stdout: -./c++.at:566: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:568: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y stderr: stdout: -./c++.at:941: $PREPARSER ./input +./c++.at:856: $PREPARSER ./input stderr: -syntax error -Discarding 'a'. -Reducing 'a'. 
-./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:941: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./c++.at:941: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS stderr: stdout: -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: ./c++.at:568: $here/modern stdout: +stderr: Modern C++: 202100 +stdout: ./c++.at:568: $PREPARSER ./list +./c++.at:1065: $PREPARSER ./input < in stderr: Destroy: "" Destroy: "" @@ -252155,161 +252210,106 @@ Destroy: "" Destroy: () Destroy: (0, 1, 2, 4, 6) +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:568: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 668. c++.at:568: ok +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS stderr: stdout: -./c++.at:566: $here/modern -stdout: -Modern C++: 202100 +./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS 685. c++.at:1360: testing Exception safety with error recovery ... -./c++.at:566: $PREPARSER ./list ./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:566: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -666. c++.at:566: ok -stderr: stdout: +./c++.at:941: $PREPARSER ./input stderr: -stdout: -./c++.at:1066: $PREPARSER ./input < in -./c++.at:1064: $PREPARSER ./input < in -stderr: -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +syntax error +Discarding 'a'. +Reducing 'a'. +./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +681. c++.at:884: ok -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -./c++.at:1064: $PREPARSER ./input < in -stderr: -stderr: -error: invalid expression -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 686. 
c++.at:1361: testing Exception safety without error recovery ... -./c++.at:1064: $PREPARSER ./input < in ./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -./c++.at:1066: $PREPARSER ./input < in stderr: stdout: +./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:851: $PREPARSER ./input -error: invalid character -stderr: -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: +./c++.at:1360: ./exceptions || exit 77 +stderr: +Inner caught +Outer caught +./c++.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy ======== Testing with C++ standard flags: '' -./c++.at:567: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -stderr: ./c++.at:856: $PREPARSER ./input -stdout: stderr: -./c++.at:1066: ./check -./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr --std=c++98 not supported -======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./c++.at:857: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./existing.at:808: $PREPARSER ./input -stderr: -./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:851: $PREPARSER ./input stderr: -621. 
existing.at:808: ok +======== Testing with C++ standard flags: '' +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: +./c++.at:569: $here/modern stdout: -./c++.at:574: $here/modern -./c++.at:1066: ./check -stdout: --std=c++03 not supported -======== Testing with C++ standard flags: '' -Modern C++: 202002 -./c++.at:574: $PREPARSER ./list - +Modern C++: 202100 +./c++.at:569: $PREPARSER ./list stderr: -Destroy: "" -Destroy: "" +Destroy: "0" +Destroy: "0" Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: "1" +Destroy: (0) +Destroy: "2" +Destroy: "2" +Destroy: (0, 1) Destroy: "" Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () +Destroy: (0, 1, 2) +Destroy: "4" +Destroy: "4" +Destroy: (0, 1, 2) +Destroy: (0, 1, 2, 4) Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () +Destroy: (0, 1, 2, 4) +Destroy: "6" +Destroy: "6" +Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -687. c++.at:1362: testing Exception safety with error recovery api.value.type=variant ... -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS -stderr: -stdout: +./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +669. c++.at:569: ok stderr: stdout: -./c++.at:573: $here/modern -./c++.at:1360: ./exceptions || exit 77 +./c++.at:567: $here/modern stdout: -Modern C++: 202002 -./c++.at:573: $PREPARSER ./list + +Modern C++: 202100 +./c++.at:567: $PREPARSER ./list +stderr: stderr: Destroy: "0" Destroy: "0" @@ -252332,90 +252332,47 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:573: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y -Inner caught -Outer caught -./c++.at:1360: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy -stderr: stdout: -./c++.at:1066: ./check -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: +./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +667. 
c++.at:567: ok +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: stdout: -./c++.at:941: $PREPARSER ./input -stderr: -syntax error -Discarding 'a'. -Reducing 'a'. -./c++.at:941: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -681. c++.at:884: stderr: - ok -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in +./existing.at:808: $PREPARSER ./input stderr: + +./existing.at:808: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +621. existing.at:808: ok +687. c++.at:1362: testing Exception safety with error recovery api.value.type=variant ... +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS stderr: -error: invalid character stdout: - -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: ./exceptions || exit 77 stderr: -======== Testing with C++ standard flags: '' + +688. c++.at:1363: testing Exception safety without error recovery api.value.type=variant ... Inner caught Outer caught -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./c++.at:1361: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy -688. c++.at:1363: testing Exception safety without error recovery api.value.type=variant ... ./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o exceptions exceptions.cc $LIBS +./c++.at:1361: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy +689. c++.at:1371: testing C++ GLR parser identifier shadowing ... +./c++.at:1410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy ======== Testing with C++ standard flags: '' ./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:571: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS stderr: -./c++.at:569: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stdout: -./c++.at:570: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS -stderr: -stdout: -./c++.at:1362: ./exceptions || exit 77 -stderr: -Inner caught -Outer caught -./c++.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy -stderr: stdout: -./c++.at:567: $here/modern +./c++.at:570: $here/modern stdout: Modern C++: 202100 -./c++.at:567: $PREPARSER ./list +./c++.at:570: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -252438,38 +252395,28 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:567: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -667. 
c++.at:567: ok - +./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1363: ./exceptions || exit 77 -stderr: -Inner caught -Outer caught -./c++.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy -689. c++.at:1371: testing C++ GLR parser identifier shadowing ... -./c++.at:1410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy -======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +670. c++.at:570: ok +./c++.at:1064: $PREPARSER ./input < in stderr: -stdout: -./c++.at:857: $PREPARSER ./input stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:571: $here/modern +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:572: $here/modern +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stdout: Modern C++: 202100 -./c++.at:571: $PREPARSER ./list +./c++.at:572: $PREPARSER ./list +./c++.at:1066: $PREPARSER ./input < in stderr: +./c++.at:1064: $PREPARSER ./input < in Destroy: "0" Destroy: "0" Destroy: 1 @@ -252491,70 +252438,146 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) +./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./c++.at:1064: $PREPARSER ./input < in -./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: + error: invalid expression caught error error: invalid character caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid expression ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -671. c++.at:571: ok +672. c++.at:572: ok +./c++.at:1066: $PREPARSER ./input < in ./c++.at:1064: $PREPARSER ./input < in stderr: - error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in stderr: +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr error: invalid character ./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS 690. c++.at:1422: testing Shared locations ... 
+ +./c++.at:1066: $PREPARSER ./input < in ./c++.at:1456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x1.cc x1.yy +======== Testing with C++ standard flags: '' stderr: +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +======== Testing with C++ standard flags: '' stdout: -./c++.at:572: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' ./c++.at:1456: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x1.o x1.cc +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +691. c++.at:1517: testing Default action ... +======== Testing with C++ standard flags: '' stderr: stdout: -./c++.at:569: $here/modern +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1362: ./exceptions || exit 77 +stderr: +Inner caught +Outer caught +stderr: +./c++.at:1362: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy stdout: -Modern C++: 202100 -./c++.at:569: $PREPARSER ./list +./c++.at:1065: $PREPARSER ./input < in stderr: -Destroy: "0" -Destroy: "0" +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: +stderr: +error: invalid expression +stdout: +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./c++.at:1065: $PREPARSER ./input < in +======== Testing with C++ standard flags: '' +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +stderr: +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +error: invalid character +./c++.at:1555: ./check +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stderr: +stderr: +stdout: +stdout: +./c++.at:1363: ./exceptions || exit 77 +./c++.at:851: $PREPARSER ./input +stderr: +stderr: +stderr: +Inner caught +Outer caught +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +stdout: +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:574: $here/modern +stdout: +./c++.at:1363: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc --report=all input.yy +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +Modern C++: 202002 +stderr: +./c++.at:574: $PREPARSER ./list +stdout: +stderr: +./c++.at:1066: ./check +Destroy: "" +Destroy: "" Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) +Destroy: "" 
+Destroy: () +Destroy: "" +Destroy: "" +Destroy: () Destroy: "" Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () Destroy: (0, 1, 2, 4, 6) -./c++.at:569: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:574: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o list.cc list.y +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:570: $here/modern +./c++.at:571: $here/modern stdout: -669. c++.at:569: ok Modern C++: 202100 -./c++.at:570: $PREPARSER ./list +./c++.at:571: $PREPARSER ./list stderr: Destroy: "0" Destroy: "0" @@ -252577,232 +252600,439 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:570: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -670. c++.at:570: ok +./c++.at:571: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +671. c++.at:571: ok +stderr: +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o list list.cc $LIBS -691. c++.at:1517: testing Default action ... -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS 692. java.at:25: testing Java invalid directives ... ./java.at:35: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y ./java.at:50: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -fcaret YYParser.y -stderr: -stdout: -./c++.at:1555: ./check -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y 692. java.at:25: ok -stderr: +693. java.at:186: testing Java parser class and package names ... +./java.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:188: grep '[mb]4_' YYParser.y stdout: +693. java.at:186: skipped (java.at:188) stderr: -./c++.at:851: $PREPARSER ./input stdout: -stderr: + ======== Testing with C++ standard flags: '' ./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +694. java.at:217: testing Java parser class modifiers ... 
+./java.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:219: grep '[mb]4_' YYParser.y stdout: -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +694. java.at:217: skipped (java.at:219) + stderr: stdout: ./c++.at:1360: $PREPARSER ./input aaaas -./c++.at:1065: $PREPARSER ./input < in stderr: exception caught: reduction ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -693. java.at:186: testing Java parser class and package names ... -./java.at:188: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaal -./c++.at:1065: $PREPARSER ./input < in stderr: exception caught: yylex ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input i -./java.at:188: grep '[mb]4_' YYParser.y stderr: -stdout: exception caught: initial-action ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -693. java.at:186: ./c++.at:1360: $PREPARSER ./input aaaap - skipped (java.at:188) +./c++.at:1360: $PREPARSER ./input aaaap stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x560e374cdb40->Object::Object { } -Next token is token 'a' (0x560e374cdb40 'a') -Shifting token 'a' (0x560e374cdb40 'a') +0x5652ca562b40->Object::Object { } +Next token is token 'a' (0x5652ca562b40 'a') +Shifting token 'a' (0x5652ca562b40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560e374cdb40 'a') --> $$ = nterm item (0x560e374cdb40 'a') + $1 = token 'a' (0x5652ca562b40 'a') +-> $$ = nterm item (0x5652ca562b40 'a') Entering state 11 Stack now 0 11 Reading a token -0x560e374cdb90->Object::Object { 0x560e374cdb40 } -Next token is token 'a' (0x560e374cdb90 'a') -Shifting token 'a' (0x560e374cdb90 'a') +0x5652ca562b90->Object::Object { 0x5652ca562b40 } +Next token is token 'a' (0x5652ca562b90 'a') +Shifting token 'a' (0x5652ca562b90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560e374cdb90 'a') --> $$ = nterm item (0x560e374cdb90 'a') + $1 = token 'a' (0x5652ca562b90 'a') +-> $$ = nterm item (0x5652ca562b90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x560e374cdbe0->Object::Object { 0x560e374cdb40, 0x560e374cdb90 } -Next token is token 'a' (0x560e374cdbe0 'a') -Shifting token 'a' (0x560e374cdbe0 'a') +0x5652ca562be0->Object::Object { 0x5652ca562b40, 0x5652ca562b90 } +Next token is token 'a' (0x5652ca562be0 'a') +Shifting token 'a' (0x5652ca562be0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560e374cdbe0 
'a') --> $$ = nterm item (0x560e374cdbe0 'a') + $1 = token 'a' (0x5652ca562be0 'a') +-> $$ = nterm item (0x5652ca562be0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x560e374cdc30->Object::Object { 0x560e374cdb40, 0x560e374cdb90, 0x560e374cdbe0 } -Next token is token 'a' (0x560e374cdc30 'a') -Shifting token 'a' (0x560e374cdc30 'a') +0x5652ca562c30->Object::Object { 0x5652ca562b40, 0x5652ca562b90, 0x5652ca562be0 } +Next token is token 'a' (0x5652ca562c30 'a') +Shifting token 'a' (0x5652ca562c30 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560e374cdc30 'a') --> $$ = nterm item (0x560e374cdc30 'a') + $1 = token 'a' (0x5652ca562c30 'a') +-> $$ = nterm item (0x5652ca562c30 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x560e374cdc80->Object::Object { 0x560e374cdb40, 0x560e374cdb90, 0x560e374cdbe0, 0x560e374cdc30 } -Next token is token 'p' (0x560e374cdc80 'p'Exception caught: cleaning lookahead and stack -0x560e374cdc80->Object::~Object { 0x560e374cdb40, 0x560e374cdb90, 0x560e374cdbe0, 0x560e374cdc30, 0x560e374cdc80 } -0x560e374cdc30->Object::~Object { 0x560e374cdb40, 0x560e374cdb90, 0x560e374cdbe0, 0x560e374cdc30 } -0x560e374cdbe0->Object::~Object { 0x560e374cdb40, 0x560e374cdb90, 0x560e374cdbe0 } -0x560e374cdb90->Object::~Object { 0x560e374cdb40, 0x560e374cdb90 } -0x560e374cdb40->Object::~Object { 0x560e374cdb40 } +0x5652ca562c80->Object::Object { 0x5652ca562b40, 0x5652ca562b90, 0x5652ca562be0, 0x5652ca562c30 } +Next token is token 'p' (0x5652ca562c80 'p'Exception caught: cleaning lookahead and stack +0x5652ca562c80->Object::~Object { 0x5652ca562b40, 0x5652ca562b90, 0x5652ca562be0, 0x5652ca562c30, 0x5652ca562c80 } +0x5652ca562c30->Object::~Object { 0x5652ca562b40, 0x5652ca562b90, 0x5652ca562be0, 0x5652ca562c30 } +0x5652ca562be0->Object::~Object { 0x5652ca562b40, 0x5652ca562b90, 0x5652ca562be0 } +0x5652ca562b90->Object::~Object { 0x5652ca562b40, 0x5652ca562b90 } +0x5652ca562b40->Object::~Object { 0x5652ca562b40 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1463: sed -ne '/INCLUDED/p;/\\file/{p;n;p;}' include/ast/loc.hh stderr: +./c++.at:1471: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x2.cc x2.yy Starting parse Entering state 0 Stack now 0 Reading a token -0x560e374cdb40->Object::Object { } -Next token is token 'a' (0x560e374cdb40 'a') -Shifting token 'a' (0x560e374cdb40 'a') +0x5652ca562b40->Object::Object { } +Next token is token 'a' (0x5652ca562b40 'a') +Shifting token 'a' (0x5652ca562b40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560e374cdb40 'a') --> $$ = nterm item (0x560e374cdb40 'a') + $1 = token 'a' (0x5652ca562b40 'a') +-> $$ = nterm item (0x5652ca562b40 'a') Entering state 11 Stack now 0 11 Reading a token -0x560e374cdb90->Object::Object { 0x560e374cdb40 } -Next token is token 'a' (0x560e374cdb90 'a') -Shifting token 'a' (0x560e374cdb90 'a') +0x5652ca562b90->Object::Object { 0x5652ca562b40 } +Next token is token 'a' (0x5652ca562b90 'a') +Shifting token 'a' (0x5652ca562b90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560e374cdb90 'a') --> $$ = nterm item (0x560e374cdb90 'a') + $1 = token 'a' (0x5652ca562b90 'a') +-> $$ = nterm item (0x5652ca562b90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x560e374cdbe0->Object::Object 
{ 0x560e374cdb40, 0x560e374cdb90 } -Next token is token 'a' (0x560e374cdbe0 'a') -Shifting token 'a' (0x560e374cdbe0 'a') +0x5652ca562be0->Object::Object { 0x5652ca562b40, 0x5652ca562b90 } +Next token is token 'a' (0x5652ca562be0 'a') +Shifting token 'a' (0x5652ca562be0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560e374cdbe0 'a') --> $$ = nterm item (0x560e374cdbe0 'a') + $1 = token 'a' (0x5652ca562be0 'a') +-> $$ = nterm item (0x5652ca562be0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x560e374cdc30->Object::Object { 0x560e374cdb40, 0x560e374cdb90, 0x560e374cdbe0 } -Next token is token 'a' (0x560e374cdc30 'a') -Shifting token 'a' (0x560e374cdc30 'a') +0x5652ca562c30->Object::Object { 0x5652ca562b40, 0x5652ca562b90, 0x5652ca562be0 } +Next token is token 'a' (0x5652ca562c30 'a') +Shifting token 'a' (0x5652ca562c30 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560e374cdc30 'a') --> $$ = nterm item (0x560e374cdc30 'a') + $1 = token 'a' (0x5652ca562c30 'a') +-> $$ = nterm item (0x5652ca562c30 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x560e374cdc80->Object::Object { 0x560e374cdb40, 0x560e374cdb90, 0x560e374cdbe0, 0x560e374cdc30 } -Next token is token 'p' (0x560e374cdc80 'p'Exception caught: cleaning lookahead and stack -0x560e374cdc80->Object::~Object { 0x560e374cdb40, 0x560e374cdb90, 0x560e374cdbe0, 0x560e374cdc30, 0x560e374cdc80 } -0x560e374cdc30->Object::~Object { 0x560e374cdb40, 0x560e374cdb90, 0x560e374cdbe0, 0x560e374cdc30 } -0x560e374cdbe0->Object::~Object { 0x560e374cdb40, 0x560e374cdb90, 0x560e374cdbe0 } -0x560e374cdb90->Object::~Object { 0x560e374cdb40, 0x560e374cdb90 } -0x560e374cdb40->Object::~Object { 0x560e374cdb40 } +0x5652ca562c80->Object::Object { 0x5652ca562b40, 0x5652ca562b90, 0x5652ca562be0, 0x5652ca562c30 } +Next token is token 'p' (0x5652ca562c80 'p'Exception caught: cleaning lookahead and stack +0x5652ca562c80->Object::~Object { 0x5652ca562b40, 0x5652ca562b90, 0x5652ca562be0, 0x5652ca562c30, 0x5652ca562c80 } +0x5652ca562c30->Object::~Object { 0x5652ca562b40, 0x5652ca562b90, 0x5652ca562be0, 0x5652ca562c30 } +0x5652ca562be0->Object::~Object { 0x5652ca562b40, 0x5652ca562b90, 0x5652ca562be0 } +0x5652ca562b90->Object::~Object { 0x5652ca562b40, 0x5652ca562b90 } +0x5652ca562b40->Object::~Object { 0x5652ca562b40 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr stdout: exception caught: printer +695. java.at:287: testing Java parser class extends and implements ... 
./c++.at:1360: $PREPARSER ./input aaaae stderr: -stderr: -stdout: exception caught: syntax error ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:857: $PREPARSER ./input +./java.at:289: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +stderr: +stdout: +./c++.at:856: $PREPARSER ./input +./c++.at:1471: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x2.o x2.cc stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaaE -======== Testing with C++ standard flags: '' +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./java.at:289: grep '[mb]4_' YYParser.y exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -694. java.at:217: testing Java parser class modifiers ... -./java.at:219: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./c++.at:1360: $PREPARSER ./input aaaaT +======== Testing with C++ standard flags: '' +stdout: +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +695. java.at:287: ./c++.at:1360: $PREPARSER ./input aaaaT stderr: + skipped (java.at:289) ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaaR + stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./java.at:219: grep '[mb]4_' YYParser.y +stderr: stdout: -694. java.at:217: skipped (java.at:219) +./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +696. java.at:307: testing Java %parse-param and %lex-param ... +./java.at:309: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:309: grep '[mb]4_' YYParser.y +stdout: +696. java.at:307: stderr: + skipped (java.at:309) +stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +697. java.at:381: testing Java throws specifications ... stderr: +./java.at:441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y stdout: -./c++.at:572: $here/modern +./c++.at:1361: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./java.at:441: grep '[mb]4_' YYParser.y +stdout: +./c++.at:1361: $PREPARSER ./input i +stderr: +697. 
java.at:381: exception caught: initial-action +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (java.at:441) +./c++.at:1361: $PREPARSER ./input aaaap +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./c++.at:1361: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x55a3ba1ddb40->Object::Object { } +Next token is token 'a' (0x55a3ba1ddb40 'a') +Shifting token 'a' (0x55a3ba1ddb40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55a3ba1ddb40 'a') +-> $$ = nterm item (0x55a3ba1ddb40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x55a3ba1ddb90->Object::Object { 0x55a3ba1ddb40 } +Next token is token 'a' (0x55a3ba1ddb90 'a') +Shifting token 'a' (0x55a3ba1ddb90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55a3ba1ddb90 'a') +-> $$ = nterm item (0x55a3ba1ddb90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x55a3ba1ddbe0->Object::Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90 } +Next token is token 'a' (0x55a3ba1ddbe0 'a') +Shifting token 'a' (0x55a3ba1ddbe0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55a3ba1ddbe0 'a') +-> $$ = nterm item (0x55a3ba1ddbe0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x55a3ba1ddc30->Object::Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90, 0x55a3ba1ddbe0 } +Next token is token 'a' (0x55a3ba1ddc30 'a') +Shifting token 'a' (0x55a3ba1ddc30 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55a3ba1ddc30 'a') +-> $$ = nterm item (0x55a3ba1ddc30 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x55a3ba1ddc80->Object::Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90, 0x55a3ba1ddbe0, 0x55a3ba1ddc30 } +Next token is token 'p' (0x55a3ba1ddc80 'p'Exception caught: cleaning lookahead and stack +0x55a3ba1ddc80->Object::~Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90, 0x55a3ba1ddbe0, 0x55a3ba1ddc30, 0x55a3ba1ddc80 } +0x55a3ba1ddc30->Object::~Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90, 0x55a3ba1ddbe0, 0x55a3ba1ddc30 } +0x55a3ba1ddbe0->Object::~Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90, 0x55a3ba1ddbe0 } +0x55a3ba1ddb90->Object::~Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90 } +0x55a3ba1ddb40->Object::~Object { 0x55a3ba1ddb40 } +exception caught: printer +end { } +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x55a3ba1ddb40->Object::Object { } +Next token is token 'a' (0x55a3ba1ddb40 'a') +Shifting token 'a' (0x55a3ba1ddb40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55a3ba1ddb40 'a') +-> $$ = nterm item (0x55a3ba1ddb40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x55a3ba1ddb90->Object::Object { 0x55a3ba1ddb40 } +Next token is token 'a' (0x55a3ba1ddb90 'a') +Shifting token 'a' (0x55a3ba1ddb90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55a3ba1ddb90 'a') +-> $$ = nterm item (0x55a3ba1ddb90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x55a3ba1ddbe0->Object::Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90 } +Next token is token 'a' (0x55a3ba1ddbe0 'a') +Shifting token 'a' (0x55a3ba1ddbe0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 
'a' (0x55a3ba1ddbe0 'a') +-> $$ = nterm item (0x55a3ba1ddbe0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x55a3ba1ddc30->Object::Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90, 0x55a3ba1ddbe0 } +Next token is token 'a' (0x55a3ba1ddc30 'a') +Shifting token 'a' (0x55a3ba1ddc30 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55a3ba1ddc30 'a') +-> $$ = nterm item (0x55a3ba1ddc30 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x55a3ba1ddc80->Object::Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90, 0x55a3ba1ddbe0, 0x55a3ba1ddc30 } +Next token is token 'p' (0x55a3ba1ddc80 'p'Exception caught: cleaning lookahead and stack +0x55a3ba1ddc80->Object::~Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90, 0x55a3ba1ddbe0, 0x55a3ba1ddc30, 0x55a3ba1ddc80 } +0x55a3ba1ddc30->Object::~Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90, 0x55a3ba1ddbe0, 0x55a3ba1ddc30 } +0x55a3ba1ddbe0->Object::~Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90, 0x55a3ba1ddbe0 } +0x55a3ba1ddb90->Object::~Object { 0x55a3ba1ddb40, 0x55a3ba1ddb90 } +0x55a3ba1ddb40->Object::~Object { 0x55a3ba1ddb40 } +exception caught: printer +end { } +./c++.at:1361: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1361: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE +stderr: +698. java.at:470: testing Java constructor init and init_throws ... +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./java.at:475: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./c++.at:1361: $PREPARSER ./input aaaaT +stderr: +./java.at:475: grep '[mb]4_' YYParser.y +stdout: +698. java.at:470: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + skipped (java.at:475) +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: + +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +699. java.at:497: testing Java value, position, and location types ... +./java.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./java.at:499: grep '[mb]4_' YYParser.y +stdout: +699. java.at:497: skipped (java.at:499) + +stderr: +stdout: +./c++.at:1555: $PREPARSER ./test +stderr: +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' +700. java.at:528: testing Java syntax error handling without error token ... +./java.at:579: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret YYParser.y +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +700. 
java.at:528: skipped (java.at:580) + +stderr: +stdout: +./c++.at:1555: ./check +-std=c++98 not supported +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +stderr: +stdout: +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in +stderr: +error: invalid expression +stderr: +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:573: $here/modern +./c++.at:1064: $PREPARSER ./input < in stdout: Modern C++: 202100 -./c++.at:572: $PREPARSER ./list +./c++.at:573: $PREPARSER ./list +stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Destroy: "0" Destroy: "0" @@ -252825,1067 +253055,1936 @@ Destroy: "6" Destroy: (0, 1, 2, 4) Destroy: (0, 1, 2, 4, 6) -./c++.at:572: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -672. c++.at:572: ok -695. java.at:287: testing Java parser class extends and implements ... -./java.at:289: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +673. c++.at:573: ok +701. javapush.at:172: testing Trivial Push Parser with api.push-pull verification ... +./javapush.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=pull -o Main.java input.y -./java.at:289: grep '[mb]4_' YYParser.y -stdout: +./javapush.at:182: grep -c '^.*public boolean parse().*$' Main.java +702. javapush.at:217: testing Trivial Push Parser with %initial-action ... +./javapush.at:227: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y +./javapush.at:187: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java +./javapush.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=both -o Main.java input.y stderr: stdout: -695. java.at:287: skipped (java.at:289) -./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1555: ./check +-std=c++03 not supported +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./javapush.at:192: grep -c '^.*public boolean parse().*$' Main.java +./javapush.at:228: grep -c '^System.err.println("Initial action invoked");$' Main.java +./javapush.at:195: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java +702. javapush.at:217: ./javapush.at:199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y + skipped (javapush.at:230) -696. java.at:307: testing Java %parse-param and %lex-param ... 
-./java.at:309: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y stderr: stdout: -./c++.at:1463: sed -ne '/INCLUDED/p;/\\file/{p;n;p;}' include/ast/loc.hh -./java.at:309: grep '[mb]4_' YYParser.y -stdout: -./c++.at:1471: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o x2.cc x2.yy -696. java.at:307: skipped (java.at:309) - -697. java.at:381: testing Java throws specifications ... -./java.at:441: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./c++.at:1471: $CXX $CPPFLAGS $CXXFLAGS -Iinclude -c -o x2.o x2.cc -./java.at:441: grep '[mb]4_' YYParser.y -stdout: -698. java.at:470: testing Java constructor init and init_throws ... -./java.at:475: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -697. java.at:381: skipped (java.at:441) -./java.at:475: grep '[mb]4_' YYParser.y - -stdout: -698. java.at:470: skipped (java.at:475) - -699. java.at:497: testing Java value, position, and location types ... -./java.at:499: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -700. java.at:528: testing Java syntax error handling without error token ... -./java.at:579: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret YYParser.y +./c++.at:1501: $CXX $CPPFLAGS $CXXFLAGS -Iinclude $LDFLAGS -o parser x[12].o main.cc $LIBS +./javapush.at:200: grep -c '^.*public boolean parse().*$' Main.java stderr: +./javapush.at:203: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java stdout: -./java.at:499: grep '[mb]4_' YYParser.y ./c++.at:1362: $PREPARSER ./input aaaas -stdout: stderr: exception caught: reduction ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -699. java.at:497: ./c++.at:1362: $PREPARSER ./input aaaal - skipped (java.at:499) +701. javapush.at:172: skipped (javapush.at:207) +./c++.at:1362: $PREPARSER ./input aaaal +stderr: stderr: exception caught: yylex ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:856: $PREPARSER ./input +stderr: ./c++.at:1362: $PREPARSER ./input i +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + stderr: exception caught: initial-action +======== Testing with C++ standard flags: '' ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +703. d.at:103: testing D parser class extends and implements ... +stderr: +stdout: ./c++.at:1362: $PREPARSER ./input aaaap -700. 
java.at:528: stderr: - +./c++.at:1555: ./check +./d.at:106: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y +stderr: +-std=c++11 not supported +======== Testing with C++ standard flags: '' stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - skipped (java.at:580) stdout: -./c++.at:1362: $PREPARSER ./input --debug aaaap -./c++.at:573: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1065: $PREPARSER ./input < in stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: -stdout: +./c++.at:1065: $PREPARSER ./input < in Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffeca67ca67->Object::Object { } -0x7ffeca67caf0->Object::Object { 0x7ffeca67ca67 } -0x7ffeca67ca67->Object::~Object { 0x7ffeca67ca67, 0x7ffeca67caf0 } -Next token is token 'a' (0x7ffeca67caf0 'a') -0x7ffeca67ca40->Object::Object { 0x7ffeca67caf0 } -0x7ffeca67caf0->Object::~Object { 0x7ffeca67ca40, 0x7ffeca67caf0 } -Shifting token 'a' (0x7ffeca67ca40 'a') -0x5571ffe55ee0->Object::Object { 0x7ffeca67ca40 } -0x7ffeca67ca40->Object::~Object { 0x5571ffe55ee0, 0x7ffeca67ca40 } +0x7ffd9e90cc87->Object::Object { } +0x7ffd9e90cd10->Object::Object { 0x7ffd9e90cc87 } +0x7ffd9e90cc87->Object::~Object { 0x7ffd9e90cc87, 0x7ffd9e90cd10 } +Next token is token 'a' (0x7ffd9e90cd10 'a') +0x7ffd9e90cc60->Object::Object { 0x7ffd9e90cd10 } +0x7ffd9e90cd10->Object::~Object { 0x7ffd9e90cc60, 0x7ffd9e90cd10 } +Shifting token 'a' (0x7ffd9e90cc60 'a') +0x55cba242fee0->Object::Object { 0x7ffd9e90cc60 } +0x7ffd9e90cc60->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cc60 } Entering state 2 Stack now 0 2 -0x7ffeca67cb10->Object::Object { 0x5571ffe55ee0 } +0x7ffd9e90cd30->Object::Object { 0x55cba242fee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5571ffe55ee0 'a') --> $$ = nterm item (0x7ffeca67cb10 'a') -0x5571ffe55ee0->Object::~Object { 0x5571ffe55ee0, 0x7ffeca67cb10 } -0x5571ffe55ee0->Object::Object { 0x7ffeca67cb10 } -0x7ffeca67cb10->Object::~Object { 0x5571ffe55ee0, 0x7ffeca67cb10 } + $1 = token 'a' (0x55cba242fee0 'a') +-> $$ = nterm item (0x7ffd9e90cd30 'a') +0x55cba242fee0->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cd30 } +0x55cba242fee0->Object::Object { 0x7ffd9e90cd30 } +0x7ffd9e90cd30->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cd30 } Entering state 11 Stack now 0 11 Reading a token -0x7ffeca67ca67->Object::Object { 0x5571ffe55ee0 } -0x7ffeca67caf0->Object::Object { 0x5571ffe55ee0, 0x7ffeca67ca67 } -0x7ffeca67ca67->Object::~Object { 0x5571ffe55ee0, 0x7ffeca67ca67, 0x7ffeca67caf0 } -Next token is token 'a' (0x7ffeca67caf0 'a') -0x7ffeca67ca40->Object::Object { 0x5571ffe55ee0, 0x7ffeca67caf0 } -0x7ffeca67caf0->Object::~Object { 0x5571ffe55ee0, 0x7ffeca67ca40, 0x7ffeca67caf0 } -Shifting token 'a' (0x7ffeca67ca40 'a') -0x5571ffe55f00->Object::Object { 0x5571ffe55ee0, 0x7ffeca67ca40 } -0x7ffeca67ca40->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67ca40 } +0x7ffd9e90cc87->Object::Object { 0x55cba242fee0 } +0x7ffd9e90cd10->Object::Object { 0x55cba242fee0, 0x7ffd9e90cc87 } +0x7ffd9e90cc87->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cc87, 0x7ffd9e90cd10 } +Next token is token 'a' (0x7ffd9e90cd10 'a') +0x7ffd9e90cc60->Object::Object { 
0x55cba242fee0, 0x7ffd9e90cd10 } +0x7ffd9e90cd10->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cc60, 0x7ffd9e90cd10 } +Shifting token 'a' (0x7ffd9e90cc60 'a') +0x55cba242ff00->Object::Object { 0x55cba242fee0, 0x7ffd9e90cc60 } +0x7ffd9e90cc60->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cc60 } Entering state 2 Stack now 0 11 2 -0x7ffeca67cb10->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00 } +0x7ffd9e90cd30->Object::Object { 0x55cba242fee0, 0x55cba242ff00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5571ffe55f00 'a') --> $$ = nterm item (0x7ffeca67cb10 'a') -0x5571ffe55f00->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67cb10 } -0x5571ffe55f00->Object::Object { 0x5571ffe55ee0, 0x7ffeca67cb10 } -0x7ffeca67cb10->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67cb10 } + $1 = token 'a' (0x55cba242ff00 'a') +-> $$ = nterm item (0x7ffd9e90cd30 'a') +0x55cba242ff00->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cd30 } +0x55cba242ff00->Object::Object { 0x55cba242fee0, 0x7ffd9e90cd30 } +0x7ffd9e90cd30->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cd30 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffeca67ca67->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00 } -0x7ffeca67caf0->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67ca67 } -0x7ffeca67ca67->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67ca67, 0x7ffeca67caf0 } -Next token is token 'a' (0x7ffeca67caf0 'a') -0x7ffeca67ca40->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67caf0 } -0x7ffeca67caf0->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67ca40, 0x7ffeca67caf0 } -Shifting token 'a' (0x7ffeca67ca40 'a') -0x5571ffe55f20->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67ca40 } -0x7ffeca67ca40->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67ca40 } +0x7ffd9e90cc87->Object::Object { 0x55cba242fee0, 0x55cba242ff00 } +0x7ffd9e90cd10->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cc87 } +0x7ffd9e90cc87->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cc87, 0x7ffd9e90cd10 } +Next token is token 'a' (0x7ffd9e90cd10 'a') +0x7ffd9e90cc60->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cd10 } +0x7ffd9e90cd10->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cc60, 0x7ffd9e90cd10 } +Shifting token 'a' (0x7ffd9e90cc60 'a') +0x55cba242ff20->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cc60 } +0x7ffd9e90cc60->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cc60 } Entering state 2 Stack now 0 11 11 2 -0x7ffeca67cb10->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20 } +0x7ffd9e90cd30->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5571ffe55f20 'a') --> $$ = nterm item (0x7ffeca67cb10 'a') -0x5571ffe55f20->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67cb10 } -0x5571ffe55f20->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67cb10 } -0x7ffeca67cb10->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67cb10 } + $1 = token 'a' (0x55cba242ff20 'a') +-> $$ = nterm item (0x7ffd9e90cd30 'a') +0x55cba242ff20->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cd30 } +0x55cba242ff20->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cd30 } +0x7ffd9e90cd30->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 
0x7ffd9e90cd30 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffeca67ca67->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20 } -0x7ffeca67caf0->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67ca67 } -0x7ffeca67ca67->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67ca67, 0x7ffeca67caf0 } -Next token is token 'a' (0x7ffeca67caf0 'a') -0x7ffeca67ca40->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67caf0 } -0x7ffeca67caf0->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67ca40, 0x7ffeca67caf0 } -Shifting token 'a' (0x7ffeca67ca40 'a') -0x5571ffe55f40->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67ca40 } -0x7ffeca67ca40->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67ca40 } +0x7ffd9e90cc87->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20 } +0x7ffd9e90cd10->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cc87 } +0x7ffd9e90cc87->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cc87, 0x7ffd9e90cd10 } +Next token is token 'a' (0x7ffd9e90cd10 'a') +0x7ffd9e90cc60->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cd10 } +0x7ffd9e90cd10->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cc60, 0x7ffd9e90cd10 } +Shifting token 'a' (0x7ffd9e90cc60 'a') +0x55cba242ff40->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cc60 } +0x7ffd9e90cc60->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cc60 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffeca67cb10->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40 } +0x7ffd9e90cd30->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5571ffe55f40 'a') --> $$ = nterm item (0x7ffeca67cb10 'a') -0x5571ffe55f40->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67cb10 } -0x5571ffe55f40->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67cb10 } -0x7ffeca67cb10->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67cb10 } + $1 = token 'a' (0x55cba242ff40 'a') +-> $$ = nterm item (0x7ffd9e90cd30 'a') +0x55cba242ff40->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cd30 } +0x55cba242ff40->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cd30 } +0x7ffd9e90cd30->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cd30 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffeca67ca67->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40 } -0x7ffeca67caf0->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67ca67 } -0x7ffeca67ca67->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67ca67, 0x7ffeca67caf0 } -Next token is token 'p' (0x7ffeca67caf0 'p'Exception caught: cleaning lookahead and stack -0x5571ffe55f40->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67caf0 } -0x5571ffe55f20->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67caf0 } -0x5571ffe55f00->Object::~Object { 
0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67caf0 } -0x5571ffe55ee0->Object::~Object { 0x5571ffe55ee0, 0x7ffeca67caf0 } -0x7ffeca67caf0->Object::~Object { 0x7ffeca67caf0 } +0x7ffd9e90cc87->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40 } +0x7ffd9e90cd10->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cc87 } +0x7ffd9e90cc87->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cc87, 0x7ffd9e90cd10 } +Next token is token 'p' (0x7ffd9e90cd10 'p'Exception caught: cleaning lookahead and stack +0x55cba242ff40->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cd10 } +0x55cba242ff20->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cd10 } +0x55cba242ff00->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cd10 } +0x55cba242fee0->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cd10 } +0x7ffd9e90cd10->Object::~Object { 0x7ffd9e90cd10 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./d.at:106: grep '[mb]4_' YYParser.y stderr: +error: invalid expression +stderr: +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffeca67ca67->Object::Object { } -0x7ffeca67caf0->Object::Object { 0x7ffeca67ca67 } -0x7ffeca67ca67->Object::~Object { 0x7ffeca67ca67, 0x7ffeca67caf0 } -Next token is token 'a' (0x7ffeca67caf0 'a') -0x7ffeca67ca40->Object::Object { 0x7ffeca67caf0 } -0x7ffeca67caf0->Object::~Object { 0x7ffeca67ca40, 0x7ffeca67caf0 } -Shifting token 'a' (0x7ffeca67ca40 'a') -0x5571ffe55ee0->Object::Object { 0x7ffeca67ca40 } -0x7ffeca67ca40->Object::~Object { 0x5571ffe55ee0, 0x7ffeca67ca40 } +0x7ffd9e90cc87->Object::Object { } +0x7ffd9e90cd10->Object::Object { 0x7ffd9e90cc87 } +0x7ffd9e90cc87->Object::~Object { 0x7ffd9e90cc87, 0x7ffd9e90cd10 } +Next token is token 'a' (0x7ffd9e90cd10 'a') +0x7ffd9e90cc60->Object::Object { 0x7ffd9e90cd10 } +0x7ffd9e90cd10->Object::~Object { 0x7ffd9e90cc60, 0x7ffd9e90cd10 } +Shifting token 'a' (0x7ffd9e90cc60 'a') +0x55cba242fee0->Object::Object { 0x7ffd9e90cc60 } +0x7ffd9e90cc60->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cc60 } Entering state 2 Stack now 0 2 -0x7ffeca67cb10->Object::Object { 0x5571ffe55ee0 } +0x7ffd9e90cd30->Object::Object { 0x55cba242fee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5571ffe55ee0 'a') --> $$ = nterm item (0x7ffeca67cb10 'a') -0x5571ffe55ee0->Object::~Object { 0x5571ffe55ee0, 0x7ffeca67cb10 } -0x5571ffe55ee0->Object::Object { 0x7ffeca67cb10 } -0x7ffeca67cb10->Object::~Object { 0x5571ffe55ee0, 0x7ffeca67cb10 } + $1 = token 'a' (0x55cba242fee0 'a') +-> $$ = nterm item (0x7ffd9e90cd30 'a') +0x55cba242fee0->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cd30 } +0x55cba242fee0->Object::Object { 0x7ffd9e90cd30 } +0x7ffd9e90cd30->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cd30 } Entering state 11 Stack now 0 11 Reading a token -0x7ffeca67ca67->Object::Object { 0x5571ffe55ee0 } -0x7ffeca67caf0->Object::Object { 0x5571ffe55ee0, 0x7ffeca67ca67 } -0x7ffeca67ca67->Object::~Object { 0x5571ffe55ee0, 0x7ffeca67ca67, 0x7ffeca67caf0 } -Next token is token 'a' (0x7ffeca67caf0 'a') -0x7ffeca67ca40->Object::Object { 0x5571ffe55ee0, 0x7ffeca67caf0 } -0x7ffeca67caf0->Object::~Object 
{ 0x5571ffe55ee0, 0x7ffeca67ca40, 0x7ffeca67caf0 } -Shifting token 'a' (0x7ffeca67ca40 'a') -0x5571ffe55f00->Object::Object { 0x5571ffe55ee0, 0x7ffeca67ca40 } -0x7ffeca67ca40->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67ca40 } +0x7ffd9e90cc87->Object::Object { 0x55cba242fee0 } +0x7ffd9e90cd10->Object::Object { 0x55cba242fee0, 0x7ffd9e90cc87 } +0x7ffd9e90cc87->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cc87, 0x7ffd9e90cd10 } +Next token is token 'a' (0x7ffd9e90cd10 'a') +0x7ffd9e90cc60->Object::Object { 0x55cba242fee0, 0x7ffd9e90cd10 } +0x7ffd9e90cd10->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cc60, 0x7ffd9e90cd10 } +Shifting token 'a' (0x7ffd9e90cc60 'a') +0x55cba242ff00->Object::Object { 0x55cba242fee0, 0x7ffd9e90cc60 } +0x7ffd9e90cc60->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cc60 } Entering state 2 Stack now 0 11 2 -0x7ffeca67cb10->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00 } +0x7ffd9e90cd30->Object::Object { 0x55cba242fee0, 0x55cba242ff00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5571ffe55f00 'a') --> $$ = nterm item (0x7ffeca67cb10 'a') -0x5571ffe55f00->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67cb10 } -0x5571ffe55f00->Object::Object { 0x5571ffe55ee0, 0x7ffeca67cb10 } -0x7ffeca67cb10->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67cb10 } + $1 = token 'a' (0x55cba242ff00 'a') +-> $$ = nterm item (0x7ffd9e90cd30 'a') +0x55cba242ff00->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cd30 } +0x55cba242ff00->Object::Object { 0x55cba242fee0, 0x7ffd9e90cd30 } +0x7ffd9e90cd30->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cd30 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffeca67ca67->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00 } -0x7ffeca67caf0->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67ca67 } -0x7ffeca67ca67->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67ca67, 0x7ffeca67caf0 } -Next token is token 'a' (0x7ffeca67caf0 'a') -0x7ffeca67ca40->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67caf0 } -0x7ffeca67caf0->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67ca40, 0x7ffeca67caf0 } -Shifting token 'a' (0x7ffeca67ca40 'a') -0x5571ffe55f20->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67ca40 } -0x7ffeca67ca40->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67ca40 } +0x7ffd9e90cc87->Object::Object { 0x55cba242fee0, 0x55cba242ff00 } +0x7ffd9e90cd10->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cc87 } +0x7ffd9e90cc87->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cc87, 0x7ffd9e90cd10 } +Next token is token 'a' (0x7ffd9e90cd10 'a') +0x7ffd9e90cc60->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cd10 } +0x7ffd9e90cd10->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cc60, 0x7ffd9e90cd10 } +Shifting token 'a' (0x7ffd9e90cc60 'a') +0x55cba242ff20->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cc60 } +0x7ffd9e90cc60->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cc60 } Entering state 2 Stack now 0 11 11 2 -0x7ffeca67cb10->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20 } +0x7ffd9e90cd30->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5571ffe55f20 'a') --> $$ = nterm item (0x7ffeca67cb10 'a') -0x5571ffe55f20->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67cb10 } 
-0x5571ffe55f20->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67cb10 } -0x7ffeca67cb10->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67cb10 } + $1 = token 'a' (0x55cba242ff20 'a') +-> $$ = nterm item (0x7ffd9e90cd30 'a') +0x55cba242ff20->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cd30 } +0x55cba242ff20->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cd30 } +0x7ffd9e90cd30->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cd30 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffeca67ca67->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20 } -0x7ffeca67caf0->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67ca67 } -0x7ffeca67ca67->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67ca67, 0x7ffeca67caf0 } -Next token is token 'a' (0x7ffeca67caf0 'a') -0x7ffeca67ca40->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67caf0 } -0x7ffeca67caf0->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67ca40, 0x7ffeca67caf0 } -Shifting token 'a' (0x7ffeca67ca40 'a') -0x5571ffe55f40->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67ca40 } -0x7ffeca67ca40->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67ca40 } +0x7ffd9e90cc87->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20 } +0x7ffd9e90cd10->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cc87 } +0x7ffd9e90cc87->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cc87, 0x7ffd9e90cd10 } +Next token is token 'a' (0x7ffd9e90cd10 'a') +0x7ffd9e90cc60->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cd10 } +0x7ffd9e90cd10->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cc60, 0x7ffd9e90cd10 } +Shifting token 'a' (0x7ffd9e90cc60 'a') +0x55cba242ff40->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cc60 } +0x7ffd9e90cc60->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cc60 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffeca67cb10->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40 } +0x7ffd9e90cd30->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5571ffe55f40 'a') --> $$ = nterm item (0x7ffeca67cb10 'a') -0x5571ffe55f40->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67cb10 } -0x5571ffe55f40->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67cb10 } -0x7ffeca67cb10->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67cb10 } + $1 = token 'a' (0x55cba242ff40 'a') +-> $$ = nterm item (0x7ffd9e90cd30 'a') +0x55cba242ff40->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cd30 } +0x55cba242ff40->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cd30 } +0x7ffd9e90cd30->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cd30 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffeca67ca67->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40 } -0x7ffeca67caf0->Object::Object { 0x5571ffe55ee0, 0x5571ffe55f00, 
0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67ca67 } -0x7ffeca67ca67->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67ca67, 0x7ffeca67caf0 } -Next token is token 'p' (0x7ffeca67caf0 'p'Exception caught: cleaning lookahead and stack -0x5571ffe55f40->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x5571ffe55f40, 0x7ffeca67caf0 } -0x5571ffe55f20->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x5571ffe55f20, 0x7ffeca67caf0 } -0x5571ffe55f00->Object::~Object { 0x5571ffe55ee0, 0x5571ffe55f00, 0x7ffeca67caf0 } -0x5571ffe55ee0->Object::~Object { 0x5571ffe55ee0, 0x7ffeca67caf0 } -0x7ffeca67caf0->Object::~Object { 0x7ffeca67caf0 } +0x7ffd9e90cc87->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40 } +0x7ffd9e90cd10->Object::Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cc87 } +0x7ffd9e90cc87->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cc87, 0x7ffd9e90cd10 } +Next token is token 'p' (0x7ffd9e90cd10 'p'Exception caught: cleaning lookahead and stack +0x55cba242ff40->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x55cba242ff40, 0x7ffd9e90cd10 } +0x55cba242ff20->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x55cba242ff20, 0x7ffd9e90cd10 } +0x55cba242ff00->Object::~Object { 0x55cba242fee0, 0x55cba242ff00, 0x7ffd9e90cd10 } +0x55cba242fee0->Object::~Object { 0x55cba242fee0, 0x7ffd9e90cd10 } +0x7ffd9e90cd10->Object::~Object { 0x7ffd9e90cd10 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr -stdout: +./c++.at:1065: $PREPARSER ./input < in +703. d.at:103: stdout: + skipped (d.at:106) exception caught: printer ./c++.at:1362: $PREPARSER ./input aaaae + stderr: -701. javapush.at:172: testing Trivial Push Parser with api.push-pull verification ... +704. d.at:138: testing D parser class api.token.raw true by default ... +error: invalid character +stderr: +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./d.at:141: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y exception caught: syntax error ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./javapush.at:181: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=pull -o Main.java input.y +======== Testing with C++ standard flags: '' +stderr: +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ./c++.at:1362: $PREPARSER ./input aaaaE stderr: +stdout: +./c++.at:1555: ./check exception caught: syntax error, unexpected end of file, expecting 'a' -702. javapush.at:217: testing Trivial Push Parser with %initial-action ... 
-./javapush.at:227: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./javapush.at:182: grep -c '^.*public boolean parse().*$' Main.java +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y ./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./javapush.at:187: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java +./d.at:141: grep '[mb]4_' YYParser.y stderr: stdout: -./c++.at:857: $PREPARSER ./input -./javapush.at:191: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=both -o Main.java input.y +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +705. cxx-type.at:409: testing GLR: Resolve ambiguity, impure, no locations ... ./c++.at:1362: $PREPARSER ./input aaaaR +704. d.at:138: stderr: +./cxx-type.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y + skipped (d.at:141) +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + stderr: +types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:410: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./javapush.at:228: grep -c '^System.err.println("Initial action invoked");$' Main.java stdout: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:851: $PREPARSER ./input -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS ./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +706. cxx-type.at:415: testing GLR: Resolve ambiguity, impure, locations ... ./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -702. javapush.at:217: skipped (javapush.at:230) -./javapush.at:192: grep -c '^.*public boolean parse().*$' Main.java -./javapush.at:195: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java - -./javapush.at:199: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Dapi.push-pull=push -o Main.java input.y -stderr: -stdout: -./javapush.at:200: grep -c '^.*public boolean parse().*$' Main.java -./c++.at:1361: $PREPARSER ./input aaaas -./javapush.at:203: grep -c '^.*public int push_parse(int yylextoken, Object yylexval).*$' Main.java -stderr: -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -701. javapush.at:172: skipped (javapush.at:207) -703. 
d.at:103: testing D parser class extends and implements ... -./d.at:106: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./c++.at:1361: $PREPARSER ./input aaaal +./cxx-type.at:416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS stderr: stdout: ./c++.at:1363: $PREPARSER ./input aaaas stderr: -./c++.at:1361: $PREPARSER ./input i exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./d.at:106: grep '[mb]4_' YYParser.y +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./c++.at:1363: $PREPARSER ./input aaaal +======== Testing with C++ standard flags: '' stderr: +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS exception caught: yylex ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input i -./c++.at:1361: $PREPARSER ./input aaaap stderr: +exception caught: initial-action stderr: -703. d.at:103: exception caught: initial-action ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -704. d.at:138: testing D parser class api.token.raw true by default ... -./d.at:141: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -Wno-deprecated YYParser.y -./c++.at:574: $here/modern +./c++.at:1502: $PREPARSER ./parser stderr: - skipped (d.at:106) -stdout: -Modern C++: 202100 -./c++.at:574: $PREPARSER ./list +./c++.at:1502: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +690. 
c++.at:1422: ok ./c++.at:1363: $PREPARSER ./input aaaap stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -Destroy: "" -Destroy: "" -Destroy: 1 -Destroy: "" -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: "" -Destroy: 3 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: () -Destroy: 5 -Destroy: () -Destroy: "" -Destroy: "" -Destroy: () -Destroy: (0, 1, 2, 4, 6) -./c++.at:1361: $PREPARSER ./input --debug aaaap -stderr: -./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: - -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x55ceb864fb40->Object::Object { } -Next token is token 'a' (0x55ceb864fb40 'a') -Shifting token 'a' (0x55ceb864fb40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ceb864fb40 'a') --> $$ = nterm item (0x55ceb864fb40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x55ceb864fb90->Object::Object { 0x55ceb864fb40 } -Next token is token 'a' (0x55ceb864fb90 'a') -Shifting token 'a' (0x55ceb864fb90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ceb864fb90 'a') --> $$ = nterm item (0x55ceb864fb90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x55ceb864fbe0->Object::Object { 0x55ceb864fb40, 0x55ceb864fb90 } -Next token is token 'a' (0x55ceb864fbe0 'a') -Shifting token 'a' (0x55ceb864fbe0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ceb864fbe0 'a') --> $$ = nterm item (0x55ceb864fbe0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x55ceb864fc30->Object::Object { 0x55ceb864fb40, 0x55ceb864fb90, 0x55ceb864fbe0 } -Next token is token 'a' (0x55ceb864fc30 'a') -Shifting token 'a' (0x55ceb864fc30 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ceb864fc30 'a') --> $$ = nterm item (0x55ceb864fc30 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x55ceb864fc80->Object::Object { 0x55ceb864fb40, 0x55ceb864fb90, 0x55ceb864fbe0, 0x55ceb864fc30 } -Next token is token 'p' (0x55ceb864fc80 'p'Exception caught: cleaning lookahead and stack -0x55ceb864fc80->Object::~Object { 0x55ceb864fb40, 0x55ceb864fb90, 0x55ceb864fbe0, 0x55ceb864fc30, 0x55ceb864fc80 } -0x55ceb864fc30->Object::~Object { 0x55ceb864fb40, 0x55ceb864fb90, 0x55ceb864fbe0, 0x55ceb864fc30 } -0x55ceb864fbe0->Object::~Object { 0x55ceb864fb40, 0x55ceb864fb90, 0x55ceb864fbe0 } -0x55ceb864fb90->Object::~Object { 0x55ceb864fb40, 0x55ceb864fb90 } -0x55ceb864fb40->Object::~Object { 0x55ceb864fb40 } -exception caught: printer -end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffd876f4fbf->Object::Object { } -0x7ffd876f5090->Object::Object { 0x7ffd876f4fbf } -0x7ffd876f4fbf->Object::~Object { 0x7ffd876f4fbf, 0x7ffd876f5090 } -Next token is token 'a' (0x7ffd876f5090 'a') -0x7ffd876f4fe0->Object::Object { 0x7ffd876f5090 } -0x7ffd876f5090->Object::~Object { 0x7ffd876f4fe0, 0x7ffd876f5090 } -Shifting token 'a' (0x7ffd876f4fe0 'a') -0x55f5c4d09ee0->Object::Object { 0x7ffd876f4fe0 } -0x7ffd876f4fe0->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f4fe0 } +0x7ffc5f68fc7f->Object::Object { } +0x7ffc5f68fd50->Object::Object { 
0x7ffc5f68fc7f } +0x7ffc5f68fc7f->Object::~Object { 0x7ffc5f68fc7f, 0x7ffc5f68fd50 } +Next token is token 'a' (0x7ffc5f68fd50 'a') +0x7ffc5f68fca0->Object::Object { 0x7ffc5f68fd50 } +0x7ffc5f68fd50->Object::~Object { 0x7ffc5f68fca0, 0x7ffc5f68fd50 } +Shifting token 'a' (0x7ffc5f68fca0 'a') +0x55bfece4fee0->Object::Object { 0x7ffc5f68fca0 } +0x7ffc5f68fca0->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fca0 } Entering state 1 Stack now 0 1 -0x7ffd876f50b0->Object::Object { 0x55f5c4d09ee0 } +0x7ffc5f68fd70->Object::Object { 0x55bfece4fee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f5c4d09ee0 'a') --> $$ = nterm item (0x7ffd876f50b0 'a') -0x55f5c4d09ee0->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f50b0 } -0x55f5c4d09ee0->Object::Object { 0x7ffd876f50b0 } -0x7ffd876f50b0->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f50b0 } + $1 = token 'a' (0x55bfece4fee0 'a') +-> $$ = nterm item (0x7ffc5f68fd70 'a') +0x55bfece4fee0->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fd70 } +0x55bfece4fee0->Object::Object { 0x7ffc5f68fd70 } +0x7ffc5f68fd70->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fd70 } Entering state 10 Stack now 0 10 Reading a token -0x7ffd876f4fbf->Object::Object { 0x55f5c4d09ee0 } -0x7ffd876f5090->Object::Object { 0x55f5c4d09ee0, 0x7ffd876f4fbf } -0x7ffd876f4fbf->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f4fbf, 0x7ffd876f5090 } -Next token is token 'a' (0x7ffd876f5090 'a') -0x7ffd876f4fe0->Object::Object { 0x55f5c4d09ee0, 0x7ffd876f5090 } -0x7ffd876f5090->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f4fe0, 0x7ffd876f5090 } -Shifting token 'a' (0x7ffd876f4fe0 'a') -0x55f5c4d09f00->Object::Object { 0x55f5c4d09ee0, 0x7ffd876f4fe0 } -0x7ffd876f4fe0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f4fe0 } +0x7ffc5f68fc7f->Object::Object { 0x55bfece4fee0 } +0x7ffc5f68fd50->Object::Object { 0x55bfece4fee0, 0x7ffc5f68fc7f } +0x7ffc5f68fc7f->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fc7f, 0x7ffc5f68fd50 } +Next token is token 'a' (0x7ffc5f68fd50 'a') +0x7ffc5f68fca0->Object::Object { 0x55bfece4fee0, 0x7ffc5f68fd50 } +0x7ffc5f68fd50->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fca0, 0x7ffc5f68fd50 } +Shifting token 'a' (0x7ffc5f68fca0 'a') +0x55bfece4ff00->Object::Object { 0x55bfece4fee0, 0x7ffc5f68fca0 } +0x7ffc5f68fca0->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fca0 } Entering state 1 Stack now 0 10 1 -0x7ffd876f50b0->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00 } +0x7ffc5f68fd70->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f5c4d09f00 'a') --> $$ = nterm item (0x7ffd876f50b0 'a') -0x55f5c4d09f00->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f50b0 } -0x55f5c4d09f00->Object::Object { 0x55f5c4d09ee0, 0x7ffd876f50b0 } -0x7ffd876f50b0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f50b0 } + $1 = token 'a' (0x55bfece4ff00 'a') +-> $$ = nterm item (0x7ffc5f68fd70 'a') +0x55bfece4ff00->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fd70 } +0x55bfece4ff00->Object::Object { 0x55bfece4fee0, 0x7ffc5f68fd70 } +0x7ffc5f68fd70->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fd70 } Entering state 10 Stack now 0 10 10 Reading a token -0x7ffd876f4fbf->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00 } -0x7ffd876f5090->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f4fbf } -0x7ffd876f4fbf->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f4fbf, 0x7ffd876f5090 } -Next token is token 'a' (0x7ffd876f5090 'a') 
-0x7ffd876f4fe0->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f5090 } -0x7ffd876f5090->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f4fe0, 0x7ffd876f5090 } -Shifting token 'a' (0x7ffd876f4fe0 'a') -0x55f5c4d09f20->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f4fe0 } -0x7ffd876f4fe0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f4fe0 } +0x7ffc5f68fc7f->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00 } +0x7ffc5f68fd50->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fc7f } +0x7ffc5f68fc7f->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fc7f, 0x7ffc5f68fd50 } +Next token is token 'a' (0x7ffc5f68fd50 'a') +0x7ffc5f68fca0->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fd50 } +0x7ffc5f68fd50->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fca0, 0x7ffc5f68fd50 } +Shifting token 'a' (0x7ffc5f68fca0 'a') +0x55bfece4ff20->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fca0 } +0x7ffc5f68fca0->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fca0 } Entering state 1 Stack now 0 10 10 1 -0x7ffd876f50b0->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20 } +0x7ffc5f68fd70->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f5c4d09f20 'a') --> $$ = nterm item (0x7ffd876f50b0 'a') -0x55f5c4d09f20->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f50b0 } -0x55f5c4d09f20->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f50b0 } -0x7ffd876f50b0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f50b0 } + $1 = token 'a' (0x55bfece4ff20 'a') +-> $$ = nterm item (0x7ffc5f68fd70 'a') +0x55bfece4ff20->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fd70 } +0x55bfece4ff20->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fd70 } +0x7ffc5f68fd70->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fd70 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffd876f4fbf->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20 } -0x7ffd876f5090->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f4fbf } -0x7ffd876f4fbf->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f4fbf, 0x7ffd876f5090 } -Next token is token 'a' (0x7ffd876f5090 'a') -0x7ffd876f4fe0->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f5090 } -0x7ffd876f5090->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f4fe0, 0x7ffd876f5090 } -Shifting token 'a' (0x7ffd876f4fe0 'a') -0x55f5c4d09f40->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f4fe0 } -0x7ffd876f4fe0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f4fe0 } +0x7ffc5f68fc7f->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20 } +0x7ffc5f68fd50->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fc7f } +0x7ffc5f68fc7f->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fc7f, 0x7ffc5f68fd50 } +Next token is token 'a' (0x7ffc5f68fd50 'a') +0x7ffc5f68fca0->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fd50 } +0x7ffc5f68fd50->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fca0, 0x7ffc5f68fd50 } +Shifting token 'a' (0x7ffc5f68fca0 'a') 
+0x55bfece4ff40->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fca0 } +0x7ffc5f68fca0->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fca0 } Entering state 1 Stack now 0 10 10 10 1 -0x7ffd876f50b0->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40 } +0x7ffc5f68fd70->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f5c4d09f40 'a') --> $$ = nterm item (0x7ffd876f50b0 'a') -0x55f5c4d09f40->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f50b0 } -0x55f5c4d09f40->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f50b0 } -0x7ffd876f50b0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f50b0 } + $1 = token 'a' (0x55bfece4ff40 'a') +-> $$ = nterm item (0x7ffc5f68fd70 'a') +0x55bfece4ff40->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fd70 } +0x55bfece4ff40->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fd70 } +0x7ffc5f68fd70->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fd70 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffd876f4fbf->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40 } -0x7ffd876f5090->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f4fbf } -0x7ffd876f4fbf->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f4fbf, 0x7ffd876f5090 } -Next token is token 'p' (0x7ffd876f5090 'p'Exception caught: cleaning lookahead and stack -0x55f5c4d09f40->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f5090 } -0x55f5c4d09f20->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f5090 } -0x55f5c4d09f00->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f5090 } -0x55f5c4d09ee0->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f5090 } -0x7ffd876f5090->Object::~Object { 0x7ffd876f5090 } +0x7ffc5f68fc7f->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40 } +0x7ffc5f68fd50->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fc7f } +0x7ffc5f68fc7f->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fc7f, 0x7ffc5f68fd50 } +Next token is token 'p' (0x7ffc5f68fd50 'p'Exception caught: cleaning lookahead and stack +0x55bfece4ff40->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fd50 } +0x55bfece4ff20->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fd50 } +0x55bfece4ff00->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fd50 } +0x55bfece4fee0->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fd50 } +0x7ffc5f68fd50->Object::~Object { 0x7ffc5f68fd50 } exception caught: printer end { } ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -674. 
c++.at:574: ok -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x55ceb864fb40->Object::Object { } -Next token is token 'a' (0x55ceb864fb40 'a') -Shifting token 'a' (0x55ceb864fb40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ceb864fb40 'a') --> $$ = nterm item (0x55ceb864fb40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x55ceb864fb90->Object::Object { 0x55ceb864fb40 } -Next token is token 'a' (0x55ceb864fb90 'a') -Shifting token 'a' (0x55ceb864fb90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ceb864fb90 'a') --> $$ = nterm item (0x55ceb864fb90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x55ceb864fbe0->Object::Object { 0x55ceb864fb40, 0x55ceb864fb90 } -Next token is token 'a' (0x55ceb864fbe0 'a') -Shifting token 'a' (0x55ceb864fbe0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ceb864fbe0 'a') --> $$ = nterm item (0x55ceb864fbe0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x55ceb864fc30->Object::Object { 0x55ceb864fb40, 0x55ceb864fb90, 0x55ceb864fbe0 } -Next token is token 'a' (0x55ceb864fc30 'a') -Shifting token 'a' (0x55ceb864fc30 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55ceb864fc30 'a') --> $$ = nterm item (0x55ceb864fc30 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x55ceb864fc80->Object::Object { 0x55ceb864fb40, 0x55ceb864fb90, 0x55ceb864fbe0, 0x55ceb864fc30 } -Next token is token 'p' (0x55ceb864fc80 'p'Exception caught: cleaning lookahead and stack -0x55ceb864fc80->Object::~Object { 0x55ceb864fb40, 0x55ceb864fb90, 0x55ceb864fbe0, 0x55ceb864fc30, 0x55ceb864fc80 } -0x55ceb864fc30->Object::~Object { 0x55ceb864fb40, 0x55ceb864fb90, 0x55ceb864fbe0, 0x55ceb864fc30 } -0x55ceb864fbe0->Object::~Object { 0x55ceb864fb40, 0x55ceb864fb90, 0x55ceb864fbe0 } -0x55ceb864fb90->Object::~Object { 0x55ceb864fb40, 0x55ceb864fb90 } -0x55ceb864fb40->Object::~Object { 0x55ceb864fb40 } -exception caught: printer -end { } -./c++.at:1361: grep '^exception caught: printer$' stderr -stdout: + stderr: -./d.at:141: grep '[mb]4_' YYParser.y Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffd876f4fbf->Object::Object { } -0x7ffd876f5090->Object::Object { 0x7ffd876f4fbf } -0x7ffd876f4fbf->Object::~Object { 0x7ffd876f4fbf, 0x7ffd876f5090 } -Next token is token 'a' (0x7ffd876f5090 'a') -0x7ffd876f4fe0->Object::Object { 0x7ffd876f5090 } -0x7ffd876f5090->Object::~Object { 0x7ffd876f4fe0, 0x7ffd876f5090 } -Shifting token 'a' (0x7ffd876f4fe0 'a') -0x55f5c4d09ee0->Object::Object { 0x7ffd876f4fe0 } -0x7ffd876f4fe0->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f4fe0 } +0x7ffc5f68fc7f->Object::Object { } +0x7ffc5f68fd50->Object::Object { 0x7ffc5f68fc7f } +0x7ffc5f68fc7f->Object::~Object { 0x7ffc5f68fc7f, 0x7ffc5f68fd50 } +Next token is token 'a' (0x7ffc5f68fd50 'a') +0x7ffc5f68fca0->Object::Object { 0x7ffc5f68fd50 } +0x7ffc5f68fd50->Object::~Object { 0x7ffc5f68fca0, 0x7ffc5f68fd50 } +Shifting token 'a' (0x7ffc5f68fca0 'a') +0x55bfece4fee0->Object::Object { 0x7ffc5f68fca0 } +0x7ffc5f68fca0->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fca0 } Entering state 1 Stack now 0 1 -0x7ffd876f50b0->Object::Object { 0x55f5c4d09ee0 } +0x7ffc5f68fd70->Object::Object { 0x55bfece4fee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f5c4d09ee0 'a') --> $$ = nterm item (0x7ffd876f50b0 'a') 
-0x55f5c4d09ee0->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f50b0 } -0x55f5c4d09ee0->Object::Object { 0x7ffd876f50b0 } -0x7ffd876f50b0->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f50b0 } + $1 = token 'a' (0x55bfece4fee0 'a') +-> $$ = nterm item (0x7ffc5f68fd70 'a') +0x55bfece4fee0->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fd70 } +0x55bfece4fee0->Object::Object { 0x7ffc5f68fd70 } +0x7ffc5f68fd70->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fd70 } Entering state 10 Stack now 0 10 Reading a token -0x7ffd876f4fbf->Object::Object { 0x55f5c4d09ee0 } -0x7ffd876f5090->Object::Object { 0x55f5c4d09ee0, 0x7ffd876f4fbf } -0x7ffd876f4fbf->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f4fbf, 0x7ffd876f5090 } -Next token is token 'a' (0x7ffd876f5090 'a') -0x7ffd876f4fe0->Object::Object { 0x55f5c4d09ee0, 0x7ffd876f5090 } -0x7ffd876f5090->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f4fe0, 0x7ffd876f5090 } -Shifting token 'a' (0x7ffd876f4fe0 'a') -0x55f5c4d09f00->Object::Object { 0x55f5c4d09ee0, 0x7ffd876f4fe0 } -0x7ffd876f4fe0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f4fe0 } +0x7ffc5f68fc7f->Object::Object { 0x55bfece4fee0 } +0x7ffc5f68fd50->Object::Object { 0x55bfece4fee0, 0x7ffc5f68fc7f } +0x7ffc5f68fc7f->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fc7f, 0x7ffc5f68fd50 } +Next token is token 'a' (0x7ffc5f68fd50 'a') +0x7ffc5f68fca0->Object::Object { 0x55bfece4fee0, 0x7ffc5f68fd50 } +0x7ffc5f68fd50->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fca0, 0x7ffc5f68fd50 } +Shifting token 'a' (0x7ffc5f68fca0 'a') +0x55bfece4ff00->Object::Object { 0x55bfece4fee0, 0x7ffc5f68fca0 } +0x7ffc5f68fca0->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fca0 } Entering state 1 Stack now 0 10 1 -0x7ffd876f50b0->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00 } +0x7ffc5f68fd70->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f5c4d09f00 'a') --> $$ = nterm item (0x7ffd876f50b0 'a') -0x55f5c4d09f00->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f50b0 } -0x55f5c4d09f00->Object::Object { 0x55f5c4d09ee0, 0x7ffd876f50b0 } -0x7ffd876f50b0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f50b0 } + $1 = token 'a' (0x55bfece4ff00 'a') +-> $$ = nterm item (0x7ffc5f68fd70 'a') +0x55bfece4ff00->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fd70 } +0x55bfece4ff00->Object::Object { 0x55bfece4fee0, 0x7ffc5f68fd70 } +0x7ffc5f68fd70->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fd70 } Entering state 10 Stack now 0 10 10 Reading a token -0x7ffd876f4fbf->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00 } -0x7ffd876f5090->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f4fbf } -0x7ffd876f4fbf->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f4fbf, 0x7ffd876f5090 } -Next token is token 'a' (0x7ffd876f5090 'a') -0x7ffd876f4fe0->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f5090 } -0x7ffd876f5090->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f4fe0, 0x7ffd876f5090 } -Shifting token 'a' (0x7ffd876f4fe0 'a') -0x55f5c4d09f20->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f4fe0 } -0x7ffd876f4fe0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f4fe0 } +0x7ffc5f68fc7f->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00 } +0x7ffc5f68fd50->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fc7f } +0x7ffc5f68fc7f->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fc7f, 0x7ffc5f68fd50 } +Next token is token 'a' 
(0x7ffc5f68fd50 'a') +0x7ffc5f68fca0->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fd50 } +0x7ffc5f68fd50->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fca0, 0x7ffc5f68fd50 } +Shifting token 'a' (0x7ffc5f68fca0 'a') +0x55bfece4ff20->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fca0 } +0x7ffc5f68fca0->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fca0 } Entering state 1 Stack now 0 10 10 1 -0x7ffd876f50b0->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20 } +0x7ffc5f68fd70->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f5c4d09f20 'a') --> $$ = nterm item (0x7ffd876f50b0 'a') -0x55f5c4d09f20->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f50b0 } -0x55f5c4d09f20->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f50b0 } -0x7ffd876f50b0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f50b0 } + $1 = token 'a' (0x55bfece4ff20 'a') +-> $$ = nterm item (0x7ffc5f68fd70 'a') +0x55bfece4ff20->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fd70 } +0x55bfece4ff20->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fd70 } +0x7ffc5f68fd70->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fd70 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffd876f4fbf->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20 } -0x7ffd876f5090->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f4fbf } -0x7ffd876f4fbf->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f4fbf, 0x7ffd876f5090 } -Next token is token 'a' (0x7ffd876f5090 'a') -0x7ffd876f4fe0->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f5090 } -0x7ffd876f5090->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f4fe0, 0x7ffd876f5090 } -Shifting token 'a' (0x7ffd876f4fe0 'a') -0x55f5c4d09f40->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f4fe0 } -0x7ffd876f4fe0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f4fe0 } +0x7ffc5f68fc7f->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20 } +0x7ffc5f68fd50->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fc7f } +0x7ffc5f68fc7f->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fc7f, 0x7ffc5f68fd50 } +Next token is token 'a' (0x7ffc5f68fd50 'a') +0x7ffc5f68fca0->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fd50 } +0x7ffc5f68fd50->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fca0, 0x7ffc5f68fd50 } +Shifting token 'a' (0x7ffc5f68fca0 'a') +0x55bfece4ff40->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fca0 } +0x7ffc5f68fca0->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fca0 } Entering state 1 Stack now 0 10 10 10 1 -0x7ffd876f50b0->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40 } +0x7ffc5f68fd70->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f5c4d09f40 'a') --> $$ = nterm item (0x7ffd876f50b0 'a') -0x55f5c4d09f40->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f50b0 } 
-0x55f5c4d09f40->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f50b0 } -0x7ffd876f50b0->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f50b0 } + $1 = token 'a' (0x55bfece4ff40 'a') +-> $$ = nterm item (0x7ffc5f68fd70 'a') +0x55bfece4ff40->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fd70 } +0x55bfece4ff40->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fd70 } +0x7ffc5f68fd70->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fd70 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffd876f4fbf->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40 } -0x7ffd876f5090->Object::Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f4fbf } -0x7ffd876f4fbf->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f4fbf, 0x7ffd876f5090 } -Next token is token 'p' (0x7ffd876f5090 'p'Exception caught: cleaning lookahead and stack -0x55f5c4d09f40->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x55f5c4d09f40, 0x7ffd876f5090 } -0x55f5c4d09f20->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x55f5c4d09f20, 0x7ffd876f5090 } -0x55f5c4d09f00->Object::~Object { 0x55f5c4d09ee0, 0x55f5c4d09f00, 0x7ffd876f5090 } -0x55f5c4d09ee0->Object::~Object { 0x55f5c4d09ee0, 0x7ffd876f5090 } -0x7ffd876f5090->Object::~Object { 0x7ffd876f5090 } +0x7ffc5f68fc7f->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40 } +0x7ffc5f68fd50->Object::Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fc7f } +0x7ffc5f68fc7f->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fc7f, 0x7ffc5f68fd50 } +Next token is token 'p' (0x7ffc5f68fd50 'p'Exception caught: cleaning lookahead and stack +0x55bfece4ff40->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x55bfece4ff40, 0x7ffc5f68fd50 } +0x55bfece4ff20->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x55bfece4ff20, 0x7ffc5f68fd50 } +0x55bfece4ff00->Object::~Object { 0x55bfece4fee0, 0x55bfece4ff00, 0x7ffc5f68fd50 } +0x55bfece4fee0->Object::~Object { 0x55bfece4fee0, 0x7ffc5f68fd50 } +0x7ffc5f68fd50->Object::~Object { 0x7ffc5f68fd50 } exception caught: printer end { } -exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaae ./c++.at:1363: grep '^exception caught: printer$' stderr stdout: -stdout: -stderr: -704. 
d.at:138: exception caught: syntax error exception caught: printer ./c++.at:1363: $PREPARSER ./input aaaae -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - stderr: exception caught: syntax error - skipped (d.at:141) ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./c++.at:1361: $PREPARSER ./input aaaaE stderr: ./c++.at:1363: $PREPARSER ./input aaaaE -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaT -./c++.at:1363: $PREPARSER ./input aaaaT -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1363: $PREPARSER ./input aaaaR -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -707. cxx-type.at:420: testing GLR: Resolve ambiguity, pure, no locations ... -./c++.at:1361: $PREPARSER ./input aaaaR -./cxx-type.at:421: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -stderr: -706. cxx-type.at:415: testing GLR: Resolve ambiguity, impure, locations ... -./cxx-type.at:416: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -705. cxx-type.at:409: testing GLR: Resolve ambiguity, impure, no locations ... 
-./cxx-type.at:410: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -types.y:87.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:416: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -stderr: -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:410: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -stderr: -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -stderr: -stdout: -./cxx-type.at:421: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./c++.at:1555: $PREPARSER ./test -stderr: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -stdout: -./c++.at:1555: ./check --std=c++98 not supported -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stderr: -stdout: -stdout: -./c++.at:573: $here/modern -stdout: -./c++.at:1501: $CXX $CPPFLAGS $CXXFLAGS -Iinclude $LDFLAGS -o parser x[12].o main.cc $LIBS -Modern C++: 202100 -./c++.at:573: $PREPARSER ./list -stderr: -Destroy: "0" -Destroy: "0" -Destroy: 1 -Destroy: "1" -Destroy: (0) -Destroy: "2" -Destroy: "2" -Destroy: (0, 1) -Destroy: "" -Destroy: 3 -Destroy: (0, 1, 2) -Destroy: "4" -Destroy: "4" -Destroy: (0, 1, 2) -Destroy: (0, 1, 2, 4) -Destroy: 5 -Destroy: (0, 1, 2, 4) -Destroy: "6" -Destroy: "6" -Destroy: (0, 1, 2, 4) -Destroy: (0, 1, 2, 4, 6) -./c++.at:573: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -673. c++.at:573: ok -stderr: -stdout: -./c++.at:1555: ./check --std=c++03 not supported -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS - -stderr: -stdout: -708. cxx-type.at:426: testing GLR: Resolve ambiguity, pure, locations ... 
-./c++.at:1065: $PREPARSER ./input < in -./cxx-type.at:427: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -stderr: -stderr: stdout: -error: invalid expression -caught error -error: invalid character -caught error -======== Testing with C++ standard flags: '' -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -stdout: ./c++.at:1360: $PREPARSER ./input aaaas +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: reduction -stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1065: $PREPARSER ./input < in -./c++.at:1555: ./check --std=c++11 not supported -======== Testing with C++ standard flags: '' ./c++.at:1360: $PREPARSER ./input aaaal stderr: -stderr: -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -error: invalid expression +./c++.at:1363: $PREPARSER ./input aaaaT exception caught: yylex -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -types.y:87.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples ./c++.at:1360: $PREPARSER ./input i -./cxx-type.at:427: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./c++.at:1065: $PREPARSER ./input < in +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: initial-action ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaR stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaap +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ./c++.at:1360: $PREPARSER ./input --debug aaaap +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +707. cxx-type.at:420: testing GLR: Resolve ambiguity, pure, no locations ... 
stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x55fa6e155b40->Object::Object { } -Next token is token 'a' (0x55fa6e155b40 'a') -Shifting token 'a' (0x55fa6e155b40 'a') +0x55c90d83eb40->Object::Object { } +Next token is token 'a' (0x55c90d83eb40 'a') +Shifting token 'a' (0x55c90d83eb40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fa6e155b40 'a') --> $$ = nterm item (0x55fa6e155b40 'a') + $1 = token 'a' (0x55c90d83eb40 'a') +-> $$ = nterm item (0x55c90d83eb40 'a') Entering state 11 Stack now 0 11 Reading a token -0x55fa6e155b90->Object::Object { 0x55fa6e155b40 } -Next token is token 'a' (0x55fa6e155b90 'a') -Shifting token 'a' (0x55fa6e155b90 'a') +0x55c90d83eb90->Object::Object { 0x55c90d83eb40 } +Next token is token 'a' (0x55c90d83eb90 'a') +Shifting token 'a' (0x55c90d83eb90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fa6e155b90 'a') --> $$ = nterm item (0x55fa6e155b90 'a') + $1 = token 'a' (0x55c90d83eb90 'a') +-> $$ = nterm item (0x55c90d83eb90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x55fa6e155be0->Object::Object { 0x55fa6e155b40, 0x55fa6e155b90 } -Next token is token 'a' (0x55fa6e155be0 'a') -Shifting token 'a' (0x55fa6e155be0 'a') +0x55c90d83ebe0->Object::Object { 0x55c90d83eb40, 0x55c90d83eb90 } +Next token is token 'a' (0x55c90d83ebe0 'a') +Shifting token 'a' (0x55c90d83ebe0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fa6e155be0 'a') --> $$ = nterm item (0x55fa6e155be0 'a') + $1 = token 'a' (0x55c90d83ebe0 'a') +-> $$ = nterm item (0x55c90d83ebe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x55fa6e155c30->Object::Object { 0x55fa6e155b40, 0x55fa6e155b90, 0x55fa6e155be0 } -Next token is token 'a' (0x55fa6e155c30 'a') -Shifting token 'a' (0x55fa6e155c30 'a') +0x55c90d83ec30->Object::Object { 0x55c90d83eb40, 0x55c90d83eb90, 0x55c90d83ebe0 } +Next token is token 'a' (0x55c90d83ec30 'a') +Shifting token 'a' (0x55c90d83ec30 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fa6e155c30 'a') --> $$ = nterm item (0x55fa6e155c30 'a') + $1 = token 'a' (0x55c90d83ec30 'a') +-> $$ = nterm item (0x55c90d83ec30 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x55fa6e155c80->Object::Object { 0x55fa6e155b40, 0x55fa6e155b90, 0x55fa6e155be0, 0x55fa6e155c30 } -Next token is token 'p' (0x55fa6e155c80 'p'Exception caught: cleaning lookahead and stack -0x55fa6e155c80->Object::~Object { 0x55fa6e155b40, 0x55fa6e155b90, 0x55fa6e155be0, 0x55fa6e155c30, 0x55fa6e155c80 } -0x55fa6e155c30->Object::~Object { 0x55fa6e155b40, 0x55fa6e155b90, 0x55fa6e155be0, 0x55fa6e155c30 } -0x55fa6e155be0->Object::~Object { 0x55fa6e155b40, 0x55fa6e155b90, 0x55fa6e155be0 } -0x55fa6e155b90->Object::~Object { 0x55fa6e155b40, 0x55fa6e155b90 } -0x55fa6e155b40->Object::~Object { 0x55fa6e155b40 } +0x55c90d83ec80->Object::Object { 0x55c90d83eb40, 0x55c90d83eb90, 0x55c90d83ebe0, 0x55c90d83ec30 } +Next token is token 'p' (0x55c90d83ec80 'p'Exception caught: cleaning lookahead and stack +0x55c90d83ec80->Object::~Object { 0x55c90d83eb40, 0x55c90d83eb90, 0x55c90d83ebe0, 0x55c90d83ec30, 0x55c90d83ec80 } +0x55c90d83ec30->Object::~Object { 0x55c90d83eb40, 0x55c90d83eb90, 0x55c90d83ebe0, 0x55c90d83ec30 } +0x55c90d83ebe0->Object::~Object { 0x55c90d83eb40, 0x55c90d83eb90, 0x55c90d83ebe0 } +0x55c90d83eb90->Object::~Object { 0x55c90d83eb40, 0x55c90d83eb90 } 
+0x55c90d83eb40->Object::~Object { 0x55c90d83eb40 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: +./cxx-type.at:421: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y Starting parse Entering state 0 Stack now 0 Reading a token -0x55fa6e155b40->Object::Object { } -Next token is token 'a' (0x55fa6e155b40 'a') -Shifting token 'a' (0x55fa6e155b40 'a') +0x55c90d83eb40->Object::Object { } +Next token is token 'a' (0x55c90d83eb40 'a') +Shifting token 'a' (0x55c90d83eb40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fa6e155b40 'a') --> $$ = nterm item (0x55fa6e155b40 'a') + $1 = token 'a' (0x55c90d83eb40 'a') +-> $$ = nterm item (0x55c90d83eb40 'a') Entering state 11 Stack now 0 11 Reading a token -0x55fa6e155b90->Object::Object { 0x55fa6e155b40 } -Next token is token 'a' (0x55fa6e155b90 'a') -Shifting token 'a' (0x55fa6e155b90 'a') +0x55c90d83eb90->Object::Object { 0x55c90d83eb40 } +Next token is token 'a' (0x55c90d83eb90 'a') +Shifting token 'a' (0x55c90d83eb90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fa6e155b90 'a') --> $$ = nterm item (0x55fa6e155b90 'a') + $1 = token 'a' (0x55c90d83eb90 'a') +-> $$ = nterm item (0x55c90d83eb90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x55fa6e155be0->Object::Object { 0x55fa6e155b40, 0x55fa6e155b90 } -Next token is token 'a' (0x55fa6e155be0 'a') -Shifting token 'a' (0x55fa6e155be0 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fa6e155be0 'a') --> $$ = nterm item (0x55fa6e155be0 'a') -Entering state 11 -Stack now 0 11 11 11 +0x55c90d83ebe0->Object::Object { 0x55c90d83eb40, 0x55c90d83eb90 } +Next token is token 'a' (0x55c90d83ebe0 'a') +Shifting token 'a' (0x55c90d83ebe0 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c90d83ebe0 'a') +-> $$ = nterm item (0x55c90d83ebe0 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x55c90d83ec30->Object::Object { 0x55c90d83eb40, 0x55c90d83eb90, 0x55c90d83ebe0 } +Next token is token 'a' (0x55c90d83ec30 'a') +Shifting token 'a' (0x55c90d83ec30 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c90d83ec30 'a') +-> $$ = nterm item (0x55c90d83ec30 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x55c90d83ec80->Object::Object { 0x55c90d83eb40, 0x55c90d83eb90, 0x55c90d83ebe0, 0x55c90d83ec30 } +Next token is token 'p' (0x55c90d83ec80 'p'Exception caught: cleaning lookahead and stack +0x55c90d83ec80->Object::~Object { 0x55c90d83eb40, 0x55c90d83eb90, 0x55c90d83ebe0, 0x55c90d83ec30, 0x55c90d83ec80 } +0x55c90d83ec30->Object::~Object { 0x55c90d83eb40, 0x55c90d83eb90, 0x55c90d83ebe0, 0x55c90d83ec30 } +0x55c90d83ebe0->Object::~Object { 0x55c90d83eb40, 0x55c90d83eb90, 0x55c90d83ebe0 } +0x55c90d83eb90->Object::~Object { 0x55c90d83eb40, 0x55c90d83eb90 } +0x55c90d83eb40->Object::~Object { 0x55c90d83eb40 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaE +stderr: +exception caught: 
syntax error, unexpected end of file, expecting 'a' +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:574: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o modern modern.cc $LIBS +./c++.at:1360: $PREPARSER ./input aaaaT +stderr: +stderr: +types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:421: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./cxx-type.at:412: $PREPARSER ./types test-input +stderr: +syntax error +./cxx-type.at:412: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:851: $PREPARSER ./input +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./cxx-type.at:412: $PREPARSER ./types -p test-input +stderr: +Starting parse +Entering state 0 +Reducing stack 0 by rule 1 (line 64): +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 23 +Reducing stack 0 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token '=' () +Shifting token '=' () 
+Entering state 22 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 29 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 30 +Reducing stack 0 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 14 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 24 +Reading a token +Next token is token ';' () +Reducing stack 0 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '+' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '+' () +Stack 1 dies. +Removing dead stacks. +On stack 0, shifting token '+' () +Stack 0 now in state 15 +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Returning to deterministic operation. 
+Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token ';' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 23 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 23 +Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. +On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. 
+Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '=' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '=' () +On stack 0, shifting token '=' () +Stack 0 now in state 14 +On stack 1, shifting token '=' () +Stack 1 now in state 22 +Stack 0 Entering state 14 +Reading a token +Next token is token ID () +Stack 1 Entering state 22 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token '+' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token '+' () +On stack 0, shifting token '+' () +Stack 0 now in state 15 +On stack 1, shifting token '+' () +Stack 1 now in state 15 +Stack 0 Entering state 15 +Reading a token +Next token is token ID () +Stack 1 Entering state 15 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Stack 0 Entering state 25 +Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token ';' () +Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. +Stack 0 Entering state 8 +Next token is token ';' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Stack 1 Entering state 25 +Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 30 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 30 +Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. 
+On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ID () +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 20 +Next token is token ID () +syntax error +Error: popping nterm expr () +Error: popping token '(' () +Error: popping token TYPENAME () +Shifting token error () +Entering state 3 +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ')' () +Error: discarding token ')' () +Reading a token +Next token is token '=' () +Error: discarding token '=' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ';' () +Entering state 3 +Next token is token ';' () +Shifting token ';' () +Entering state 10 +Reducing stack 0 by rule 5 (line 76): + $1 = token error () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token '@' () +Shifting token '@' () +Entering state 6 +Reducing stack 0 by rule 6 (line 77): + $1 = token '@' () +Cleanup: popping nterm prog () +./cxx-type.at:412: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:856: $PREPARSER ./input 
+stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Reducing stack 0 by rule 1 (line 64): +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 23 +Reducing stack 0 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 11 +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Entering state 13 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 22 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 29 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 30 +Reducing stack 0 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Entering state 9 +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '=' () +Shifting token '=' () +Entering state 14 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 24 +Reading a token +Next token is token ';' () +Reducing stack 0 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () 
+-> $$ = nterm expr () +Entering state 8 +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '+' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '+' () +Stack 1 dies. +Removing dead stacks. +On stack 0, shifting token '+' () +Stack 0 now in state 15 +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Returning to deterministic operation. +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token TYPENAME () +Shifting token TYPENAME () +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. 
+Stack 0 Entering state 8 +Reading a token +Next token is token ';' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 23 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 23 +Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. +On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. +Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ')' () +Stack 0 Entering state 18 +Next token is token ')' () +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' () +Stack 1 Entering state 21 +Next token is token ')' () +On stack 0, shifting token ')' () +Stack 0 now in state 27 +On stack 1, shifting token ')' () +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '=' () +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '=' () +On stack 0, shifting token '=' () +Stack 0 now in state 14 +On stack 1, shifting token '=' () +Stack 1 now in state 22 +Stack 0 Entering state 14 +Reading a token +Next token is token ID () +Stack 1 Entering state 22 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token '+' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. 
+Stack 1 Entering state 29 +Next token is token '+' () +On stack 0, shifting token '+' () +Stack 0 now in state 15 +On stack 1, shifting token '+' () +Stack 1 now in state 15 +Stack 0 Entering state 15 +Reading a token +Next token is token ID () +Stack 1 Entering state 15 +Next token is token ID () +On stack 0, shifting token ID () +Stack 0 now in state 5 +On stack 1, shifting token ID () +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Stack 0 Entering state 25 +Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token ';' () +Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. +Stack 0 Entering state 8 +Next token is token ';' () +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Stack 1 Entering state 25 +Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token ';' () +On stack 0, shifting token ';' () +Stack 0 now in state 16 +On stack 1, shifting token ';' () +Stack 1 now in state 30 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME () +Stack 1 Entering state 30 +Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. +On stack 0, shifting token TYPENAME () +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Returning to deterministic operation. 
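The "Splitting off stack 1 from 0", the deferred reductions, and "Merging stack 1 into stack 0" lines above are Bison's GLR parser working through the declaration-versus-expression ambiguity that the cxx-type.at tests feed it: input such as "T (x) = y + z;" can be read either as a declaration of x or as a cast-like expression, so the parser pursues both stacks and only merges or kills them once later tokens settle the question. The grammar below is a minimal sketch reconstructed from the rule numbers printed in the traces; it is not the literal types.y from the testsuite, and its precedence declarations and semantic actions are omitted.

    /* Approximate GLR grammar for the traces above (reconstructed from the
       trace's rule numbers, not the testsuite's types.y verbatim). */
    %glr-parser
    %token TYPENAME ID
    %%
    prog : %empty                              /* rule 1 */
         | prog stmt ;                         /* rule 2 */
    stmt : expr ';'                            /* rule 3 */
         | decl                                /* rule 4 */
         | error ';'                           /* rule 5: error recovery */
         | '@' { YYACCEPT; } ;                 /* rule 6: end of test input */
    expr : ID                                  /* rule 7 */
         | TYPENAME '(' expr ')'               /* rule 8: cast-like reading */
         | expr '+' expr                       /* rule 9 */
         | expr '=' expr ;                     /* rule 10 */
    decl : TYPENAME declarator ';'             /* rule 11 */
         | TYPENAME declarator '=' expr ';' ;  /* rule 12 */
    declarator : ID                            /* rule 13: declaration reading */
         | '(' declarator ')' ;                /* rule 14 */
    %%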
+Entering state 4 +Reading a token +Next token is token '(' () +Shifting token '(' () +Entering state 12 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 18 +Reading a token +Next token is token ID () +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 20 +Next token is token ID () +syntax error +Error: popping nterm expr () +Error: popping token '(' () +Error: popping token TYPENAME () +Shifting token error () +Entering state 3 +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ')' () +Error: discarding token ')' () +Reading a token +Next token is token '=' () +Error: discarding token '=' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token '+' () +Error: discarding token '+' () +Reading a token +Next token is token ID () +Error: discarding token ID () +Reading a token +Next token is token ';' () +Entering state 3 +Next token is token ';' () +Shifting token ';' () +Entering state 10 +Reducing stack 0 by rule 5 (line 76): + $1 = token error () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 8 +Reading a token +Next token is token '+' () +Shifting token '+' () +Entering state 15 +Reading a token +Next token is token ID () +Shifting token ID () +Entering state 5 +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Entering state 25 +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Entering state 8 Reading a token -0x55fa6e155c30->Object::Object { 0x55fa6e155b40, 0x55fa6e155b90, 0x55fa6e155be0 } -Next token is token 'a' (0x55fa6e155c30 'a') -Shifting token 'a' (0x55fa6e155c30 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fa6e155c30 'a') --> $$ = nterm item (0x55fa6e155c30 'a') -Entering state 11 -Stack now 0 11 11 11 11 +Next token is token ';' () +Shifting token ';' () +Entering state 16 +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Entering state 7 +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () +Entering state 1 Reading a token -0x55fa6e155c80->Object::Object { 0x55fa6e155b40, 0x55fa6e155b90, 0x55fa6e155be0, 0x55fa6e155c30 } -Next token is token 'p' (0x55fa6e155c80 'p'Exception caught: cleaning lookahead and stack -0x55fa6e155c80->Object::~Object { 0x55fa6e155b40, 0x55fa6e155b90, 0x55fa6e155be0, 0x55fa6e155c30, 0x55fa6e155c80 } -0x55fa6e155c30->Object::~Object { 0x55fa6e155b40, 0x55fa6e155b90, 0x55fa6e155be0, 0x55fa6e155c30 } -0x55fa6e155be0->Object::~Object { 0x55fa6e155b40, 0x55fa6e155b90, 0x55fa6e155be0 } -0x55fa6e155b90->Object::~Object { 0x55fa6e155b40, 0x55fa6e155b90 } -0x55fa6e155b40->Object::~Object { 0x55fa6e155b40 } -exception caught: printer -end { } -./c++.at:1360: grep '^exception caught: printer$' stderr -stdout: -stdout: -./c++.at:1555: ./check -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; 
bison --color=no -fno-caret -fcaret -o test.cc test.y -stderr: -exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Next token is token '@' () +Shifting token '@' () +Entering state 6 +Reducing stack 0 by rule 6 (line 77): + $1 = token '@' () +Cleanup: popping nterm prog () ======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +705. cxx-type.at:409: ok +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS + +stderr: stderr: stdout: -./cxx-type.at:417: $PREPARSER ./types test-input +./c++.at:1361: $PREPARSER ./input aaaas +stdout: stderr: +./cxx-type.at:417: $PREPARSER ./types test-input +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +708. cxx-type.at:426: testing GLR: Resolve ambiguity, pure, locations ... +./cxx-type.at:427: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y stderr: -stdout: 17.5: syntax error -./cxx-type.at:412: $PREPARSER ./types test-input ./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal stderr: -syntax error -./cxx-type.at:412: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: yylex +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./cxx-type.at:417: $PREPARSER ./types -p test-input stderr: -stdout: -./c++.at:1502: $PREPARSER ./parser -stderr: Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 71): @@ -254452,12 +255551,12 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) +./c++.at:1361: $PREPARSER ./input i stderr: -./c++.at:1502: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:412: $PREPARSER ./types -p test-input ./cxx-type.at:417: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: initial-action +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -690. c++.at:1422: ok Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 71): @@ -255025,1164 +256124,228 @@ $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) stderr: +types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples 706. 
cxx-type.at:415: ok +./c++.at:1361: $PREPARSER ./input aaaap +./cxx-type.at:427: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input --debug aaaap +stderr: Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 64): --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 15 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Entering state 13 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 23 -Reducing stack 0 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Entering state 13 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 22 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 29 +Stack now 0 Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 30 -Reducing stack 0 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +0x55701dea7b40->Object::Object { } +Next token is token 'a' (0x55701dea7b40 'a') +Shifting token 'a' (0x55701dea7b40 'a') Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55701dea7b40 'a') +-> $$ = nterm item (0x55701dea7b40 'a') +Entering state 10 +Stack now 0 10 Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 
80): - $1 = token ID () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 14 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 24 -Reading a token -Next token is token ';' () -Reducing stack 0 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +0x55701dea7b90->Object::Object { 0x55701dea7b40 } +Next token is token 'a' (0x55701dea7b90 'a') +Shifting token 'a' (0x55701dea7b90 'a') Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55701dea7b90 'a') +-> $$ = nterm item (0x55701dea7b90 'a') +Entering state 10 +Stack now 0 10 10 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ')' () -Stack 0 Entering state 18 -Next token is token ')' () -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' () -Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () -Stack 0 now in state 27 -On stack 1, shifting token ')' () -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '+' () -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '+' () -Stack 1 dies. -Removing dead stacks. -On stack 0, shifting token '+' () -Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Returning to deterministic operation. 
-Entering state 15 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +0x55701dea7be0->Object::Object { 0x55701dea7b40, 0x55701dea7b90 } +Next token is token 'a' (0x55701dea7be0 'a') +Shifting token 'a' (0x55701dea7be0 'a') Entering state 1 -Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ')' () -Stack 0 Entering state 18 -Next token is token ')' () -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' () -Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () -Stack 0 now in state 27 -On stack 1, shifting token ')' () -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token ';' () -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token ';' () -On stack 0, shifting token ';' () -Stack 0 now in state 16 -On stack 1, shifting token ';' () -Stack 1 now in state 23 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME () -Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. -On stack 0, shifting token TYPENAME () -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Returning to deterministic operation. 
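The "$PREPARSER ./types -p test-input" runs above print these stack traces because the test program switches on Bison's run-time tracing when "-p" is passed. The driver below is only a plausible sketch of that mechanism, not the testsuite's actual main(); yydebug and yyparse are the standard symbols a Bison parser built with %debug (or %define parse.trace) exposes.

    /* Hypothetical trace-enabling driver -- an assumption, not the
       testsuite's real code.  Requires the grammar to be compiled with
       %debug / %define parse.trace so that yydebug exists. */
    #include <stdio.h>
    #include <string.h>

    extern int yydebug;   /* defined by the Bison-generated parser */
    int yyparse (void);

    int
    main (int argc, char **argv)
    {
      if (argc > 1 && strcmp (argv[1], "-p") == 0)
        yydebug = 1;      /* emit the "Reading a token" / "Splitting" traces */
      return yyparse ();
    }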
-Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ')' () -Stack 0 Entering state 18 -Next token is token ')' () -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' () -Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () -Stack 0 now in state 27 -On stack 1, shifting token ')' () -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '=' () -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '=' () -On stack 0, shifting token '=' () -Stack 0 now in state 14 -On stack 1, shifting token '=' () -Stack 1 now in state 22 -Stack 0 Entering state 14 -Reading a token -Next token is token ID () -Stack 1 Entering state 22 -Next token is token ID () -On stack 0, shifting token ID () -Stack 0 now in state 5 -On stack 1, shifting token ID () -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token '+' () -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token '+' () -On stack 0, shifting token '+' () -Stack 0 now in state 15 -On stack 1, shifting token '+' () -Stack 1 now in state 15 -Stack 0 Entering state 15 -Reading a token -Next token is token ID () -Stack 1 Entering state 15 -Next token is token ID () -On stack 0, shifting token ID () -Stack 0 now in state 5 -On stack 1, shifting token ID () -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. -Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token ';' () -Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. -Stack 0 Entering state 8 -Next token is token ';' () -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. -Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token ';' () -On stack 0, shifting token ';' () -Stack 0 now in state 16 -On stack 1, shifting token ';' () -Stack 1 now in state 30 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME () -Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. 
-On stack 0, shifting token TYPENAME () -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Returning to deterministic operation. -Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ID () -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 20 -Next token is token ID () -syntax error -Error: popping nterm expr () -Error: popping token '(' () -Error: popping token TYPENAME () -Shifting token error () -Entering state 3 -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token ')' () -Error: discarding token ')' () -Reading a token -Next token is token '=' () -Error: discarding token '=' () -Reading a token -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token ';' () -Entering state 3 -Next token is token ';' () -Shifting token ';' () +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55701dea7be0 'a') +-> $$ = nterm item (0x55701dea7be0 'a') Entering state 10 -Reducing stack 0 by rule 5 (line 76): - $1 = token error () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 15 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 +Stack now 0 10 10 10 Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +0x55701dea7c30->Object::Object { 0x55701dea7b40, 0x55701dea7b90, 0x55701dea7be0 } +Next token is token 'a' (0x55701dea7c30 'a') +Shifting token 'a' (0x55701dea7c30 'a') 
Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55701dea7c30 'a') +-> $$ = nterm item (0x55701dea7c30 'a') +Entering state 10 +Stack now 0 10 10 10 10 Reading a token -Next token is token '@' () -Shifting token '@' () -Entering state 6 -Reducing stack 0 by rule 6 (line 77): - $1 = token '@' () -Cleanup: popping nterm prog () -./cxx-type.at:412: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +0x55701dea7c80->Object::Object { 0x55701dea7b40, 0x55701dea7b90, 0x55701dea7be0, 0x55701dea7c30 } +Next token is token 'p' (0x55701dea7c80 'p'Exception caught: cleaning lookahead and stack +0x55701dea7c80->Object::~Object { 0x55701dea7b40, 0x55701dea7b90, 0x55701dea7be0, 0x55701dea7c30, 0x55701dea7c80 } +0x55701dea7c30->Object::~Object { 0x55701dea7b40, 0x55701dea7b90, 0x55701dea7be0, 0x55701dea7c30 } +0x55701dea7be0->Object::~Object { 0x55701dea7b40, 0x55701dea7b90, 0x55701dea7be0 } +0x55701dea7b90->Object::~Object { 0x55701dea7b40, 0x55701dea7b90 } +0x55701dea7b40->Object::~Object { 0x55701dea7b40 } +exception caught: printer +end { } + +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 64): --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 15 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Entering state 13 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 23 -Reducing stack 0 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Entering state 13 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 22 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 29 
+Stack now 0 Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 30 -Reducing stack 0 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +0x55701dea7b40->Object::Object { } +Next token is token 'a' (0x55701dea7b40 'a') +Shifting token 'a' (0x55701dea7b40 'a') Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55701dea7b40 'a') +-> $$ = nterm item (0x55701dea7b40 'a') +Entering state 10 +Stack now 0 10 Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token '=' () -Shifting token '=' () -Entering state 14 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 24 -Reading a token -Next token is token ';' () -Reducing stack 0 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +0x55701dea7b90->Object::Object { 0x55701dea7b40 } +Next token is token 'a' (0x55701dea7b90 'a') +Shifting token 'a' (0x55701dea7b90 'a') Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55701dea7b90 'a') +-> $$ = nterm item (0x55701dea7b90 'a') +Entering state 10 +Stack now 0 10 10 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ')' () -Stack 0 Entering state 18 -Next token is token ')' () -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' () -Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () -Stack 0 now in state 27 -On stack 1, shifting token ')' () -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '+' () -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '+' () -Stack 1 dies. -Removing dead stacks. 
-On stack 0, shifting token '+' () -Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Returning to deterministic operation. -Entering state 15 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +0x55701dea7be0->Object::Object { 0x55701dea7b40, 0x55701dea7b90 } +Next token is token 'a' (0x55701dea7be0 'a') +Shifting token 'a' (0x55701dea7be0 'a') Entering state 1 -Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () -Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ')' () -Stack 0 Entering state 18 -Next token is token ')' () -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' () -Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () -Stack 0 now in state 27 -On stack 1, shifting token ')' () -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token ';' () -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token ';' () -On stack 0, shifting token ';' () -Stack 0 now in state 16 -On stack 1, shifting token ';' () -Stack 1 now in state 23 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME () -Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. 
-On stack 0, shifting token TYPENAME () -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Returning to deterministic operation. -Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ')' () -Stack 0 Entering state 18 -Next token is token ')' () -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' () -Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () -Stack 0 now in state 27 -On stack 1, shifting token ')' () -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '=' () -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '=' () -On stack 0, shifting token '=' () -Stack 0 now in state 14 -On stack 1, shifting token '=' () -Stack 1 now in state 22 -Stack 0 Entering state 14 -Reading a token -Next token is token ID () -Stack 1 Entering state 22 -Next token is token ID () -On stack 0, shifting token ID () -Stack 0 now in state 5 -On stack 1, shifting token ID () -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token '+' () -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token '+' () -On stack 0, shifting token '+' () -Stack 0 now in state 15 -On stack 1, shifting token '+' () -Stack 1 now in state 15 -Stack 0 Entering state 15 -Reading a token -Next token is token ID () -Stack 1 Entering state 15 -Next token is token ID () -On stack 0, shifting token ID () -Stack 0 now in state 5 -On stack 1, shifting token ID () -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. -Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token ';' () -Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. -Stack 0 Entering state 8 -Next token is token ';' () -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. -Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. 
-Stack 1 Entering state 29 -Next token is token ';' () -On stack 0, shifting token ';' () -Stack 0 now in state 16 -On stack 1, shifting token ';' () -Stack 1 now in state 30 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME () -Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. -On stack 0, shifting token TYPENAME () -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Returning to deterministic operation. -Entering state 4 -Reading a token -Next token is token '(' () -Shifting token '(' () -Entering state 12 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 18 -Reading a token -Next token is token ID () -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 20 -Next token is token ID () -syntax error -Error: popping nterm expr () -Error: popping token '(' () -Error: popping token TYPENAME () -Shifting token error () -Entering state 3 -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token ')' () -Error: discarding token ')' () -Reading a token -Next token is token '=' () -Error: discarding token '=' () -Reading a token -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token '+' () -Error: discarding token '+' () -Reading a token -Next token is token ID () -Error: discarding token ID () -Reading a token -Next token is token ';' () -Entering state 3 -Next token is token ';' () -Shifting token ';' () +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55701dea7be0 'a') +-> $$ = nterm item (0x55701dea7be0 'a') Entering state 10 -Reducing stack 0 by rule 5 (line 76): - $1 = token error () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () -Entering state 1 -Reading a token -Next token is token ID () -Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 8 -Reading a token -Next token is token '+' () -Shifting token '+' () -Entering state 15 -Reading a token -Next token is token ID () 
-Shifting token ID () -Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Entering state 8 +Stack now 0 10 10 10 Reading a token -Next token is token ';' () -Shifting token ';' () -Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +0x55701dea7c30->Object::Object { 0x55701dea7b40, 0x55701dea7b90, 0x55701dea7be0 } +Next token is token 'a' (0x55701dea7c30 'a') +Shifting token 'a' (0x55701dea7c30 'a') Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55701dea7c30 'a') +-> $$ = nterm item (0x55701dea7c30 'a') +Entering state 10 +Stack now 0 10 10 10 10 Reading a token -Next token is token '@' () -Shifting token '@' () -Entering state 6 -Reducing stack 0 by rule 6 (line 77): - $1 = token '@' () -Cleanup: popping nterm prog () -705. cxx-type.at:409: ok - - - -stderr: +0x55701dea7c80->Object::Object { 0x55701dea7b40, 0x55701dea7b90, 0x55701dea7be0, 0x55701dea7c30 } +Next token is token 'p' (0x55701dea7c80 'p'Exception caught: cleaning lookahead and stack +0x55701dea7c80->Object::~Object { 0x55701dea7b40, 0x55701dea7b90, 0x55701dea7be0, 0x55701dea7c30, 0x55701dea7c80 } +0x55701dea7c30->Object::~Object { 0x55701dea7b40, 0x55701dea7b90, 0x55701dea7be0, 0x55701dea7c30 } +0x55701dea7be0->Object::~Object { 0x55701dea7b40, 0x55701dea7b90, 0x55701dea7be0 } +0x55701dea7b90->Object::~Object { 0x55701dea7b40, 0x55701dea7b90 } +0x55701dea7b40->Object::~Object { 0x55701dea7b40 } +exception caught: printer +end { } +./c++.at:1361: grep '^exception caught: printer$' stderr stdout: -./cxx-type.at:423: $PREPARSER ./types test-input +exception caught: printer +./c++.at:1361: $PREPARSER ./input aaaae stderr: +exception caught: syntax error +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 709. cxx-type.at:432: testing GLR: Merge conflicting parses, impure, no locations ... ./cxx-type.at:433: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -syntax error -./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -710. cxx-type.at:438: testing GLR: Merge conflicting parses, impure, locations ... -./cxx-type.at:439: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -711. cxx-type.at:444: testing GLR: Merge conflicting parses, pure, no locations ... 
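The tests announced above ("Resolve ambiguity", "Merge conflicting parses") exercise the two mechanisms a GLR grammar has once both stacks survive to the same reduction: a static preference declared with %dprec, or a user-supplied merge function declared with %merge. The fragment below only illustrates the shape of those directives, not the declarations the testsuite grammar actually uses; stmtMerge stands in for whatever merge function the test defines.

    /* Illustrative only -- the testsuite's own declarations may differ. */
    stmt : expr ';'  %dprec 1          /* lower preference             */
         | decl      %dprec 2          /* prefer the declaration read  */
         ;
    /* or, to keep both parses and combine their values at run time: */
    stmt : expr ';'  %merge <stmtMerge>
         | decl      %merge <stmtMerge>
         ;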
+./c++.at:1361: $PREPARSER ./input aaaaE stderr: -./cxx-type.at:423: $PREPARSER ./types -p test-input +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaT +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +======== Testing with C++ standard flags: '' types.y:77.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./cxx-type.at:433: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS stderr: +stdout: +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +stderr: +error: invalid expression +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:1066: $PREPARSER ./input < in +./c++.at:1555: $PREPARSER ./test +stderr: +stderr: +stderr: +error: invalid character +stdout: +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:574: $here/modern +stdout: +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Modern C++: 202100 +./c++.at:574: $PREPARSER ./list +stderr: +Destroy: "" +Destroy: "" +Destroy: 1 +Destroy: "" +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: "" +Destroy: 3 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: () +Destroy: 5 +Destroy: () +Destroy: "" +Destroy: "" +Destroy: () +Destroy: (0, 1, 2, 4, 6) +======== Testing with C++ standard flags: '' +======== Testing with C++ standard flags: '' +./c++.at:574: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +674. 
c++.at:574: ok +stderr: +stdout: +./cxx-type.at:423: $PREPARSER ./types test-input + +stderr: +syntax error +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:423: $PREPARSER ./types -p test-input +stderr: Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -256749,13 +256912,14 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () -stdout: -./cxx-type.at:445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -./c++.at:857: $PREPARSER ./input ./cxx-type.at:423: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1555: ./check stderr: +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +stderr: +stdout: Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 64): @@ -257322,57 +257486,25 @@ Reducing stack 0 by rule 6 (line 77): $1 = token '@' () Cleanup: popping nterm prog () -======== Testing with C++ standard flags: '' -707. cxx-type.at:420: ok -stderr: -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -stderr: -types.y:77.8-37: warning: unset value: $$ [-Wother] -types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] -types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +710. cxx-type.at:438: testing GLR: Merge conflicting parses, impure, locations ... +./cxx-type.at:439: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +707. cxx-type.at:420: ./c++.at:1066: ./check + ok +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS + +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS stderr: types.y:87.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./c++.at:1066: $PREPARSER ./input < in -./cxx-type.at:445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -stderr: -error: invalid expression -caught error -error: invalid character -caught error ./cxx-type.at:439: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./c++.at:1066: $PREPARSER ./input < in -stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -stderr: -stderr: -stdout: -./c++.at:851: $PREPARSER ./input -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -712. cxx-type.at:449: testing GLR: Merge conflicting parses, pure, locations ... 
-======== Testing with C++ standard flags: '' -./cxx-type.at:450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +711. cxx-type.at:444: testing GLR: Merge conflicting parses, pure, no locations ... +./cxx-type.at:445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y stderr: -types.y:87.8-37: warning: unset value: $$ [-Wother] +types.y:77.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -./cxx-type.at:450: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -stderr: -stdout: -./c++.at:1066: ./check -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./cxx-type.at:445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS stderr: stdout: ./c++.at:1362: $PREPARSER ./input aaaas @@ -257383,274 +257515,148 @@ stderr: exception caught: yylex ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./c++.at:1362: $PREPARSER ./input i stderr: exception caught: initial-action ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaap -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: stdout: ./cxx-type.at:429: $PREPARSER ./types test-input -./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: +./c++.at:1362: $PREPARSER ./input aaaap 17.5: syntax error stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7fffc23ac42f->Object::Object { } -0x7fffc23ac4d0->Object::Object { 0x7fffc23ac42f } -0x7fffc23ac42f->Object::~Object { 0x7fffc23ac42f, 0x7fffc23ac4d0 } -Next token is token 'a' (0x7fffc23ac4d0 'a') -0x7fffc23ac440->Object::Object { 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::Object { 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::~Object { 0x7fffc23ac3f7, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac4d0->Object::~Object { 0x7fffc23ac440, 0x7fffc23ac4d0 } -Shifting token 'a' (0x7fffc23ac440 'a') -0x559720c3cee0->Object::Object { 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::Object { 0x559720c3cee0, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::~Object { 0x559720c3cee0, 0x7fffc23ac3cf, 0x7fffc23ac440 } -0x7fffc23ac440->Object::~Object { 0x559720c3cee0, 0x7fffc23ac440 } -Entering state 2 -Stack now 0 2 -0x7fffc23ac4f0->Object::Object { 0x559720c3cee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x559720c3cee0 'a') --> $$ = nterm item (0x7fffc23ac4f0 'a') -0x559720c3cee0->Object::~Object { 0x559720c3cee0, 0x7fffc23ac4f0 } -0x559720c3cee0->Object::Object { 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::Object { 0x559720c3cee0, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::~Object { 0x559720c3cee0, 0x7fffc23ac4a8, 0x7fffc23ac4f0 } -0x7fffc23ac4f0->Object::~Object { 0x559720c3cee0, 0x7fffc23ac4f0 } -Entering state 11 -Stack now 0 11 -Reading a token -0x7fffc23ac42f->Object::Object { 0x559720c3cee0 } -0x7fffc23ac4d0->Object::Object { 0x559720c3cee0, 0x7fffc23ac42f } -0x7fffc23ac42f->Object::~Object { 0x559720c3cee0, 0x7fffc23ac42f, 0x7fffc23ac4d0 } -Next token is token 'a' (0x7fffc23ac4d0 'a') -0x7fffc23ac440->Object::Object { 0x559720c3cee0, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::Object { 0x559720c3cee0, 0x7fffc23ac440, 
0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::~Object { 0x559720c3cee0, 0x7fffc23ac3f7, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac4d0->Object::~Object { 0x559720c3cee0, 0x7fffc23ac440, 0x7fffc23ac4d0 } -Shifting token 'a' (0x7fffc23ac440 'a') -0x559720c3cf00->Object::Object { 0x559720c3cee0, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac3cf, 0x7fffc23ac440 } -0x7fffc23ac440->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac440 } -Entering state 2 -Stack now 0 11 2 -0x7fffc23ac4f0->Object::Object { 0x559720c3cee0, 0x559720c3cf00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x559720c3cf00 'a') --> $$ = nterm item (0x7fffc23ac4f0 'a') -0x559720c3cf00->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4f0 } -0x559720c3cf00->Object::Object { 0x559720c3cee0, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4a8, 0x7fffc23ac4f0 } -0x7fffc23ac4f0->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4f0 } -Entering state 11 -Stack now 0 11 11 -Reading a token -0x7fffc23ac42f->Object::Object { 0x559720c3cee0, 0x559720c3cf00 } -0x7fffc23ac4d0->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac42f } -0x7fffc23ac42f->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac42f, 0x7fffc23ac4d0 } -Next token is token 'a' (0x7fffc23ac4d0 'a') -0x7fffc23ac440->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac3f7, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac4d0->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac440, 0x7fffc23ac4d0 } -Shifting token 'a' (0x7fffc23ac440 'a') -0x559720c3cf20->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac3cf, 0x7fffc23ac440 } -0x7fffc23ac440->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac440 } -Entering state 2 -Stack now 0 11 11 2 -0x7fffc23ac4f0->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x559720c3cf20 'a') --> $$ = nterm item (0x7fffc23ac4f0 'a') -0x559720c3cf20->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4f0 } -0x559720c3cf20->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4a8, 0x7fffc23ac4f0 } -0x7fffc23ac4f0->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4f0 } -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x7fffc23ac42f->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20 } -0x7fffc23ac4d0->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac42f } -0x7fffc23ac42f->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac42f, 0x7fffc23ac4d0 } -Next token is token 'a' (0x7fffc23ac4d0 'a') 
-0x7fffc23ac440->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac3f7, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac4d0->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac440, 0x7fffc23ac4d0 } -Shifting token 'a' (0x7fffc23ac440 'a') -0x559720c3cf40->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac3cf, 0x7fffc23ac440 } -0x7fffc23ac440->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac440 } -Entering state 2 -Stack now 0 11 11 11 2 -0x7fffc23ac4f0->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x559720c3cf40 'a') --> $$ = nterm item (0x7fffc23ac4f0 'a') -0x559720c3cf40->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac4f0 } -0x559720c3cf40->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac4a8, 0x7fffc23ac4f0 } -0x7fffc23ac4f0->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac4f0 } -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x7fffc23ac42f->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40 } -0x7fffc23ac4d0->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac42f } -0x7fffc23ac42f->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac42f, 0x7fffc23ac4d0 } -Next token is token 'p' (0x7fffc23ac4d0 'p'Exception caught: cleaning lookahead and stack -0x559720c3cf40->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac4d0 } -0x559720c3cf20->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4d0 } -0x559720c3cf00->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4d0 } -0x559720c3cee0->Object::~Object { 0x559720c3cee0, 0x7fffc23ac4d0 } -0x7fffc23ac4d0->Object::~Object { 0x7fffc23ac4d0 } -exception caught: printer -end { } ./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input --debug aaaap +./cxx-type.at:429: $PREPARSER ./types -p test-input stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7fffc23ac42f->Object::Object { } -0x7fffc23ac4d0->Object::Object { 0x7fffc23ac42f } -0x7fffc23ac42f->Object::~Object { 0x7fffc23ac42f, 0x7fffc23ac4d0 } -Next token is token 'a' (0x7fffc23ac4d0 'a') -0x7fffc23ac440->Object::Object { 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::Object { 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::~Object { 0x7fffc23ac3f7, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac4d0->Object::~Object { 0x7fffc23ac440, 
0x7fffc23ac4d0 } -Shifting token 'a' (0x7fffc23ac440 'a') -0x559720c3cee0->Object::Object { 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::Object { 0x559720c3cee0, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::~Object { 0x559720c3cee0, 0x7fffc23ac3cf, 0x7fffc23ac440 } -0x7fffc23ac440->Object::~Object { 0x559720c3cee0, 0x7fffc23ac440 } +0x7ffcd472f1cf->Object::Object { } +0x7ffcd472f270->Object::Object { 0x7ffcd472f1cf } +0x7ffcd472f1cf->Object::~Object { 0x7ffcd472f1cf, 0x7ffcd472f270 } +Next token is token 'a' (0x7ffcd472f270 'a') +0x7ffcd472f1e0->Object::Object { 0x7ffcd472f270 } +0x7ffcd472f197->Object::Object { 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f197->Object::~Object { 0x7ffcd472f197, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f270->Object::~Object { 0x7ffcd472f1e0, 0x7ffcd472f270 } +Shifting token 'a' (0x7ffcd472f1e0 'a') +0x5600cfbbfee0->Object::Object { 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f16f, 0x7ffcd472f1e0 } +0x7ffcd472f1e0->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f1e0 } Entering state 2 Stack now 0 2 -0x7fffc23ac4f0->Object::Object { 0x559720c3cee0 } +0x7ffcd472f290->Object::Object { 0x5600cfbbfee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x559720c3cee0 'a') --> $$ = nterm item (0x7fffc23ac4f0 'a') -0x559720c3cee0->Object::~Object { 0x559720c3cee0, 0x7fffc23ac4f0 } -0x559720c3cee0->Object::Object { 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::Object { 0x559720c3cee0, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::~Object { 0x559720c3cee0, 0x7fffc23ac4a8, 0x7fffc23ac4f0 } -0x7fffc23ac4f0->Object::~Object { 0x559720c3cee0, 0x7fffc23ac4f0 } + $1 = token 'a' (0x5600cfbbfee0 'a') +-> $$ = nterm item (0x7ffcd472f290 'a') +0x5600cfbbfee0->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f290 } +0x5600cfbbfee0->Object::Object { 0x7ffcd472f290 } +0x7ffcd472f248->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f290 } +0x7ffcd472f248->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f248, 0x7ffcd472f290 } +0x7ffcd472f290->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f290 } Entering state 11 Stack now 0 11 Reading a token -0x7fffc23ac42f->Object::Object { 0x559720c3cee0 } -0x7fffc23ac4d0->Object::Object { 0x559720c3cee0, 0x7fffc23ac42f } -0x7fffc23ac42f->Object::~Object { 0x559720c3cee0, 0x7fffc23ac42f, 0x7fffc23ac4d0 } -Next token is token 'a' (0x7fffc23ac4d0 'a') -0x7fffc23ac440->Object::Object { 0x559720c3cee0, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::Object { 0x559720c3cee0, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::~Object { 0x559720c3cee0, 0x7fffc23ac3f7, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac4d0->Object::~Object { 0x559720c3cee0, 0x7fffc23ac440, 0x7fffc23ac4d0 } -Shifting token 'a' (0x7fffc23ac440 'a') -0x559720c3cf00->Object::Object { 0x559720c3cee0, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac3cf, 0x7fffc23ac440 } -0x7fffc23ac440->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac440 } +0x7ffcd472f1cf->Object::Object { 0x5600cfbbfee0 } +0x7ffcd472f270->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f1cf } +0x7ffcd472f1cf->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f1cf, 0x7ffcd472f270 } +Next token is token 'a' (0x7ffcd472f270 'a') +0x7ffcd472f1e0->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f270 } +0x7ffcd472f197->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f197->Object::~Object { 
0x5600cfbbfee0, 0x7ffcd472f197, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f270->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f1e0, 0x7ffcd472f270 } +Shifting token 'a' (0x7ffcd472f1e0 'a') +0x5600cfbbff00->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f16f, 0x7ffcd472f1e0 } +0x7ffcd472f1e0->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1e0 } Entering state 2 Stack now 0 11 2 -0x7fffc23ac4f0->Object::Object { 0x559720c3cee0, 0x559720c3cf00 } +0x7ffcd472f290->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x559720c3cf00 'a') --> $$ = nterm item (0x7fffc23ac4f0 'a') -0x559720c3cf00->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4f0 } -0x559720c3cf00->Object::Object { 0x559720c3cee0, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4a8, 0x7fffc23ac4f0 } -0x7fffc23ac4f0->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4f0 } + $1 = token 'a' (0x5600cfbbff00 'a') +-> $$ = nterm item (0x7ffcd472f290 'a') +0x5600cfbbff00->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f290 } +0x5600cfbbff00->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f290 } +0x7ffcd472f248->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f290 } +0x7ffcd472f248->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f248, 0x7ffcd472f290 } +0x7ffcd472f290->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f290 } Entering state 11 Stack now 0 11 11 Reading a token -0x7fffc23ac42f->Object::Object { 0x559720c3cee0, 0x559720c3cf00 } -0x7fffc23ac4d0->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac42f } -0x7fffc23ac42f->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac42f, 0x7fffc23ac4d0 } -Next token is token 'a' (0x7fffc23ac4d0 'a') -0x7fffc23ac440->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac3f7, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac4d0->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac440, 0x7fffc23ac4d0 } -Shifting token 'a' (0x7fffc23ac440 'a') -0x559720c3cf20->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac3cf, 0x7fffc23ac440 } -0x7fffc23ac440->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac440 } +0x7ffcd472f1cf->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00 } +0x7ffcd472f270->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1cf } +0x7ffcd472f1cf->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1cf, 0x7ffcd472f270 } +Next token is token 'a' (0x7ffcd472f270 'a') +0x7ffcd472f1e0->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f270 } +0x7ffcd472f197->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f197->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f197, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f270->Object::~Object { 0x5600cfbbfee0, 
0x5600cfbbff00, 0x7ffcd472f1e0, 0x7ffcd472f270 } +Shifting token 'a' (0x7ffcd472f1e0 'a') +0x5600cfbbff20->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f16f, 0x7ffcd472f1e0 } +0x7ffcd472f1e0->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1e0 } Entering state 2 Stack now 0 11 11 2 -0x7fffc23ac4f0->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20 } +0x7ffcd472f290->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x559720c3cf20 'a') --> $$ = nterm item (0x7fffc23ac4f0 'a') -0x559720c3cf20->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4f0 } -0x559720c3cf20->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4a8, 0x7fffc23ac4f0 } -0x7fffc23ac4f0->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4f0 } + $1 = token 'a' (0x5600cfbbff20 'a') +-> $$ = nterm item (0x7ffcd472f290 'a') +0x5600cfbbff20->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f290 } +0x5600cfbbff20->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f290 } +0x7ffcd472f248->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f290 } +0x7ffcd472f248->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f248, 0x7ffcd472f290 } +0x7ffcd472f290->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f290 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7fffc23ac42f->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20 } -0x7fffc23ac4d0->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac42f } -0x7fffc23ac42f->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac42f, 0x7fffc23ac4d0 } -Next token is token 'a' (0x7fffc23ac4d0 'a') -0x7fffc23ac440->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac3f7->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac3f7, 0x7fffc23ac440, 0x7fffc23ac4d0 } -0x7fffc23ac4d0->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac440, 0x7fffc23ac4d0 } -Shifting token 'a' (0x7fffc23ac440 'a') -0x559720c3cf40->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac440 } -0x7fffc23ac3cf->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac3cf, 0x7fffc23ac440 } -0x7fffc23ac440->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac440 } +0x7ffcd472f1cf->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20 } +0x7ffcd472f270->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1cf } +0x7ffcd472f1cf->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1cf, 0x7ffcd472f270 } +Next 
token is token 'a' (0x7ffcd472f270 'a') +0x7ffcd472f1e0->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f270 } +0x7ffcd472f197->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f197->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f197, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f270->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1e0, 0x7ffcd472f270 } +Shifting token 'a' (0x7ffcd472f1e0 'a') +0x5600cfbbff40->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f16f, 0x7ffcd472f1e0 } +0x7ffcd472f1e0->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f1e0 } Entering state 2 Stack now 0 11 11 11 2 -0x7fffc23ac4f0->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40 } +0x7ffcd472f290->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x559720c3cf40 'a') --> $$ = nterm item (0x7fffc23ac4f0 'a') -0x559720c3cf40->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac4f0 } -0x559720c3cf40->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac4f0 } -0x7fffc23ac4a8->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac4a8, 0x7fffc23ac4f0 } -0x7fffc23ac4f0->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac4f0 } + $1 = token 'a' (0x5600cfbbff40 'a') +-> $$ = nterm item (0x7ffcd472f290 'a') +0x5600cfbbff40->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f290 } +0x5600cfbbff40->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f290 } +0x7ffcd472f248->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f290 } +0x7ffcd472f248->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f248, 0x7ffcd472f290 } +0x7ffcd472f290->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f290 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7fffc23ac42f->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40 } -0x7fffc23ac4d0->Object::Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac42f } -0x7fffc23ac42f->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac42f, 0x7fffc23ac4d0 } -Next token is token 'p' (0x7fffc23ac4d0 'p'Exception caught: cleaning lookahead and stack -0x559720c3cf40->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x559720c3cf40, 0x7fffc23ac4d0 } -0x559720c3cf20->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x559720c3cf20, 0x7fffc23ac4d0 } -0x559720c3cf00->Object::~Object { 0x559720c3cee0, 0x559720c3cf00, 0x7fffc23ac4d0 } -0x559720c3cee0->Object::~Object { 0x559720c3cee0, 0x7fffc23ac4d0 } -0x7fffc23ac4d0->Object::~Object { 0x7fffc23ac4d0 } +0x7ffcd472f1cf->Object::Object { 
0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40 } +0x7ffcd472f270->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f1cf } +0x7ffcd472f1cf->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f1cf, 0x7ffcd472f270 } +Next token is token 'p' (0x7ffcd472f270 'p'Exception caught: cleaning lookahead and stack +0x5600cfbbff40->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f270 } +0x5600cfbbff20->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f270 } +0x5600cfbbff00->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f270 } +0x5600cfbbfee0->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f270 } +0x7ffcd472f270->Object::~Object { 0x7ffcd472f270 } exception caught: printer end { } -./c++.at:1362: grep '^exception caught: printer$' stderr -./cxx-type.at:429: $PREPARSER ./types -p test-input -stdout: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stderr: -exception caught: printer Starting parse Entering state 0 Reducing stack 0 by rule 1 (line 71): @@ -258217,12 +258223,139 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) -./c++.at:1362: $PREPARSER ./input aaaae -./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: stderr: -exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +stderr: +./c++.at:851: $PREPARSER ./input +./cxx-type.at:429: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./c++.at:1064: $PREPARSER ./input < in +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffcd472f1cf->Object::Object { } +0x7ffcd472f270->Object::Object { 0x7ffcd472f1cf } +0x7ffcd472f1cf->Object::~Object { 0x7ffcd472f1cf, 0x7ffcd472f270 } +Next token is token 'a' (0x7ffcd472f270 'a') +0x7ffcd472f1e0->Object::Object { 0x7ffcd472f270 } +0x7ffcd472f197->Object::Object { 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f197->Object::~Object { 0x7ffcd472f197, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f270->Object::~Object { 0x7ffcd472f1e0, 0x7ffcd472f270 } +Shifting token 'a' (0x7ffcd472f1e0 'a') +0x5600cfbbfee0->Object::Object { 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f16f, 0x7ffcd472f1e0 } +0x7ffcd472f1e0->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f1e0 } +Entering state 2 +Stack now 0 2 +0x7ffcd472f290->Object::Object { 0x5600cfbbfee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5600cfbbfee0 'a') +-> $$ = nterm item (0x7ffcd472f290 'a') +0x5600cfbbfee0->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f290 } +0x5600cfbbfee0->Object::Object { 0x7ffcd472f290 } +0x7ffcd472f248->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f290 } +0x7ffcd472f248->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f248, 0x7ffcd472f290 } +0x7ffcd472f290->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f290 } +Entering state 11 +Stack now 0 11 +Reading a token +0x7ffcd472f1cf->Object::Object { 0x5600cfbbfee0 } +0x7ffcd472f270->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f1cf } +0x7ffcd472f1cf->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f1cf, 0x7ffcd472f270 } +Next token is token 'a' (0x7ffcd472f270 'a') +0x7ffcd472f1e0->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f270 } +0x7ffcd472f197->Object::Object { 0x5600cfbbfee0, 
0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f197->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f197, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f270->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f1e0, 0x7ffcd472f270 } +Shifting token 'a' (0x7ffcd472f1e0 'a') +0x5600cfbbff00->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f16f, 0x7ffcd472f1e0 } +0x7ffcd472f1e0->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1e0 } +Entering state 2 +Stack now 0 11 2 +0x7ffcd472f290->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5600cfbbff00 'a') +-> $$ = nterm item (0x7ffcd472f290 'a') +0x5600cfbbff00->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f290 } +0x5600cfbbff00->Object::Object { 0x5600cfbbfee0, 0x7ffcd472f290 } +0x7ffcd472f248->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f290 } +0x7ffcd472f248->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f248, 0x7ffcd472f290 } +0x7ffcd472f290->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f290 } +Entering state 11 +Stack now 0 11 11 +Reading a token +0x7ffcd472f1cf->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00 } +0x7ffcd472f270->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1cf } +0x7ffcd472f1cf->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1cf, 0x7ffcd472f270 } +Next token is token 'a' (0x7ffcd472f270 'a') +0x7ffcd472f1e0->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f270 } +0x7ffcd472f197->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f197->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f197, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f270->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1e0, 0x7ffcd472f270 } +Shifting token 'a' (0x7ffcd472f1e0 'a') +0x5600cfbbff20->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f16f, 0x7ffcd472f1e0 } +0x7ffcd472f1e0->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1e0 } +Entering state 2 +Stack now 0 11 11 2 +0x7ffcd472f290->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5600cfbbff20 'a') +-> $$ = nterm item (0x7ffcd472f290 'a') +0x5600cfbbff20->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f290 } +0x5600cfbbff20->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f290 } +0x7ffcd472f248->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f290 } +0x7ffcd472f248->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f248, 0x7ffcd472f290 } +0x7ffcd472f290->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f290 } +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x7ffcd472f1cf->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20 } +0x7ffcd472f270->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1cf } +0x7ffcd472f1cf->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1cf, 0x7ffcd472f270 } +Next token is token 'a' (0x7ffcd472f270 
'a') +0x7ffcd472f1e0->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f270 } +0x7ffcd472f197->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f197->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f197, 0x7ffcd472f1e0, 0x7ffcd472f270 } +0x7ffcd472f270->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1e0, 0x7ffcd472f270 } +Shifting token 'a' (0x7ffcd472f1e0 'a') +0x5600cfbbff40->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f1e0 } +0x7ffcd472f16f->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f16f, 0x7ffcd472f1e0 } +0x7ffcd472f1e0->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f1e0 } +Entering state 2 +Stack now 0 11 11 11 2 +0x7ffcd472f290->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x5600cfbbff40 'a') +-> $$ = nterm item (0x7ffcd472f290 'a') +0x5600cfbbff40->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f290 } +0x5600cfbbff40->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f290 } +0x7ffcd472f248->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f290 } +0x7ffcd472f248->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f248, 0x7ffcd472f290 } +0x7ffcd472f290->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f290 } +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x7ffcd472f1cf->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40 } +0x7ffcd472f270->Object::Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f1cf } +0x7ffcd472f1cf->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f1cf, 0x7ffcd472f270 } +Next token is token 'p' (0x7ffcd472f270 'p'Exception caught: cleaning lookahead and stack +0x5600cfbbff40->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x5600cfbbff40, 0x7ffcd472f270 } +0x5600cfbbff20->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x5600cfbbff20, 0x7ffcd472f270 } +0x5600cfbbff00->Object::~Object { 0x5600cfbbfee0, 0x5600cfbbff00, 0x7ffcd472f270 } +0x5600cfbbfee0->Object::~Object { 0x5600cfbbfee0, 0x7ffcd472f270 } +0x7ffcd472f270->Object::~Object { 0x7ffcd472f270 } +exception caught: printer +end { } +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: grep '^exception caught: printer$' stderr stderr: Starting parse Entering state 0 @@ -258791,333 +258924,57 @@ $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) stdout: -./c++.at:1361: $PREPARSER ./input aaaas stderr: -708. cxx-type.at:426: ok -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal +exception caught: printer +708. 
cxx-type.at:426: ./c++.at:1362: $PREPARSER ./input aaaae + ok +======== Testing with C++ standard flags: '' +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: syntax error +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in ./c++.at:1362: $PREPARSER ./input aaaaE -./c++.at:1361: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap stderr: + stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: $PREPARSER ./input --debug aaaap ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x560ecc6c6b40->Object::Object { } -Next token is token 'a' (0x560ecc6c6b40 'a') -Shifting token 'a' (0x560ecc6c6b40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560ecc6c6b40 'a') --> $$ = nterm item (0x560ecc6c6b40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x560ecc6c6b90->Object::Object { 0x560ecc6c6b40 } -Next token is token 'a' (0x560ecc6c6b90 'a') -Shifting token 'a' (0x560ecc6c6b90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560ecc6c6b90 'a') --> $$ = nterm item (0x560ecc6c6b90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x560ecc6c6be0->Object::Object { 0x560ecc6c6b40, 0x560ecc6c6b90 } -Next token is token 'a' (0x560ecc6c6be0 'a') -Shifting token 'a' (0x560ecc6c6be0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560ecc6c6be0 'a') --> $$ = nterm item (0x560ecc6c6be0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x560ecc6c6c30->Object::Object { 0x560ecc6c6b40, 0x560ecc6c6b90, 0x560ecc6c6be0 } -Next token is token 'a' (0x560ecc6c6c30 'a') -Shifting token 'a' (0x560ecc6c6c30 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560ecc6c6c30 'a') --> $$ = nterm item (0x560ecc6c6c30 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x560ecc6c6c80->Object::Object { 0x560ecc6c6b40, 0x560ecc6c6b90, 0x560ecc6c6be0, 0x560ecc6c6c30 } -Next token is token 'p' (0x560ecc6c6c80 'p'Exception caught: cleaning lookahead and stack -0x560ecc6c6c80->Object::~Object { 0x560ecc6c6b40, 0x560ecc6c6b90, 0x560ecc6c6be0, 0x560ecc6c6c30, 0x560ecc6c6c80 } -0x560ecc6c6c30->Object::~Object { 0x560ecc6c6b40, 0x560ecc6c6b90, 0x560ecc6c6be0, 0x560ecc6c6c30 } -0x560ecc6c6be0->Object::~Object { 0x560ecc6c6b40, 0x560ecc6c6b90, 0x560ecc6c6be0 } -0x560ecc6c6b90->Object::~Object { 0x560ecc6c6b40, 0x560ecc6c6b90 } -0x560ecc6c6b40->Object::~Object { 0x560ecc6c6b40 } -exception caught: printer -end { } - -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 
-Reading a token -0x560ecc6c6b40->Object::Object { } -Next token is token 'a' (0x560ecc6c6b40 'a') -Shifting token 'a' (0x560ecc6c6b40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560ecc6c6b40 'a') --> $$ = nterm item (0x560ecc6c6b40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x560ecc6c6b90->Object::Object { 0x560ecc6c6b40 } -Next token is token 'a' (0x560ecc6c6b90 'a') -Shifting token 'a' (0x560ecc6c6b90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560ecc6c6b90 'a') --> $$ = nterm item (0x560ecc6c6b90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x560ecc6c6be0->Object::Object { 0x560ecc6c6b40, 0x560ecc6c6b90 } -Next token is token 'a' (0x560ecc6c6be0 'a') -Shifting token 'a' (0x560ecc6c6be0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560ecc6c6be0 'a') --> $$ = nterm item (0x560ecc6c6be0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x560ecc6c6c30->Object::Object { 0x560ecc6c6b40, 0x560ecc6c6b90, 0x560ecc6c6be0 } -Next token is token 'a' (0x560ecc6c6c30 'a') -Shifting token 'a' (0x560ecc6c6c30 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x560ecc6c6c30 'a') --> $$ = nterm item (0x560ecc6c6c30 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x560ecc6c6c80->Object::Object { 0x560ecc6c6b40, 0x560ecc6c6b90, 0x560ecc6c6be0, 0x560ecc6c6c30 } -Next token is token 'p' (0x560ecc6c6c80 'p'Exception caught: cleaning lookahead and stack -0x560ecc6c6c80->Object::~Object { 0x560ecc6c6b40, 0x560ecc6c6b90, 0x560ecc6c6be0, 0x560ecc6c6c30, 0x560ecc6c6c80 } -0x560ecc6c6c30->Object::~Object { 0x560ecc6c6b40, 0x560ecc6c6b90, 0x560ecc6c6be0, 0x560ecc6c6c30 } -0x560ecc6c6be0->Object::~Object { 0x560ecc6c6b40, 0x560ecc6c6b90, 0x560ecc6c6be0 } -0x560ecc6c6b90->Object::~Object { 0x560ecc6c6b40, 0x560ecc6c6b90 } -0x560ecc6c6b40->Object::~Object { 0x560ecc6c6b40 } -exception caught: printer -end { } -./c++.at:1361: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaae ./c++.at:1362: $PREPARSER ./input aaaaT -stderr: -exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in stderr: stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1361: $PREPARSER ./input aaaaE -======== Testing with C++ standard flags: '' -stderr: -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaR -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +712. cxx-type.at:449: testing GLR: Merge conflicting parses, pure, locations ... ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaT -713. cxx-type.at:455: testing GLR: Verbose messages, resolve ambiguity, impure, no locations ... 
-stderr: +======== Testing with C++ standard flags: '' +./cxx-type.at:450: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +./c++.at:1064: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS ======== Testing with C++ standard flags: '' ./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -./cxx-type.at:456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1363: $PREPARSER ./input aaaas -./c++.at:1361: $PREPARSER ./input aaaaR -stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1363: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i -stderr: -types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y:87.8-37: warning: unset value: $$ [-Wother] types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples -======== Testing with C++ standard flags: '' -./cxx-type.at:456: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -exception caught: initial-action +./cxx-type.at:450: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: ./cxx-type.at:435: $PREPARSER ./types test-input stderr: -./c++.at:1363: $PREPARSER ./input aaaap syntax error -stderr: ./cxx-type.at:435: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap -stderr: ./cxx-type.at:435: $PREPARSER ./types -p test-input -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffc06f7fdcf->Object::Object { } -0x7ffc06f7fe90->Object::Object { 0x7ffc06f7fdcf } -0x7ffc06f7fdcf->Object::~Object { 0x7ffc06f7fdcf, 0x7ffc06f7fe90 } -Next token is token 'a' (0x7ffc06f7fe90 'a') -0x7ffc06f7fe00->Object::Object { 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::Object { 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::~Object { 0x7ffc06f7fdb7, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fe90->Object::~Object { 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -Shifting token 'a' (0x7ffc06f7fe00 'a') -0x55b07d870ee0->Object::Object { 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::Object { 0x55b07d870ee0, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fd8f, 0x7ffc06f7fe00 } -0x7ffc06f7fe00->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fe00 } -Entering state 1 -Stack now 0 1 -0x7ffc06f7feb0->Object::Object { 0x55b07d870ee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07d870ee0 'a') --> $$ = nterm item (0x7ffc06f7feb0 'a') -0x55b07d870ee0->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7feb0 } -0x55b07d870ee0->Object::Object { 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::Object { 0x55b07d870ee0, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fe68, 0x7ffc06f7feb0 } -0x7ffc06f7feb0->Object::~Object 
{ 0x55b07d870ee0, 0x7ffc06f7feb0 } -Entering state 10 -Stack now 0 10 -Reading a token -0x7ffc06f7fdcf->Object::Object { 0x55b07d870ee0 } -0x7ffc06f7fe90->Object::Object { 0x55b07d870ee0, 0x7ffc06f7fdcf } -0x7ffc06f7fdcf->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fdcf, 0x7ffc06f7fe90 } -Next token is token 'a' (0x7ffc06f7fe90 'a') -0x7ffc06f7fe00->Object::Object { 0x55b07d870ee0, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::Object { 0x55b07d870ee0, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fdb7, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fe90->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -Shifting token 'a' (0x7ffc06f7fe00 'a') -0x55b07d870f00->Object::Object { 0x55b07d870ee0, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fd8f, 0x7ffc06f7fe00 } -0x7ffc06f7fe00->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe00 } -Entering state 1 -Stack now 0 10 1 -0x7ffc06f7feb0->Object::Object { 0x55b07d870ee0, 0x55b07d870f00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07d870f00 'a') --> $$ = nterm item (0x7ffc06f7feb0 'a') -0x55b07d870f00->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7feb0 } -0x55b07d870f00->Object::Object { 0x55b07d870ee0, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe68, 0x7ffc06f7feb0 } -0x7ffc06f7feb0->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7feb0 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0x7ffc06f7fdcf->Object::Object { 0x55b07d870ee0, 0x55b07d870f00 } -0x7ffc06f7fe90->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fdcf } -0x7ffc06f7fdcf->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fdcf, 0x7ffc06f7fe90 } -Next token is token 'a' (0x7ffc06f7fe90 'a') -0x7ffc06f7fe00->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fdb7, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fe90->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -Shifting token 'a' (0x7ffc06f7fe00 'a') -0x55b07d870f20->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fd8f, 0x7ffc06f7fe00 } -0x7ffc06f7fe00->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe00 } -Entering state 1 -Stack now 0 10 10 1 -0x7ffc06f7feb0->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07d870f20 'a') --> $$ = nterm item (0x7ffc06f7feb0 'a') -0x55b07d870f20->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7feb0 } -0x55b07d870f20->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe68, 0x7ffc06f7feb0 } -0x7ffc06f7feb0->Object::~Object { 
0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7feb0 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x7ffc06f7fdcf->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20 } -0x7ffc06f7fe90->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fdcf } -0x7ffc06f7fdcf->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fdcf, 0x7ffc06f7fe90 } -Next token is token 'a' (0x7ffc06f7fe90 'a') -0x7ffc06f7fe00->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fdb7, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fe90->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -Shifting token 'a' (0x7ffc06f7fe00 'a') -0x55b07d870f40->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fd8f, 0x7ffc06f7fe00 } -0x7ffc06f7fe00->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fe00 } -Entering state 1 -Stack now 0 10 10 10 1 -0x7ffc06f7feb0->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07d870f40 'a') --> $$ = nterm item (0x7ffc06f7feb0 'a') -0x55b07d870f40->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7feb0 } -0x55b07d870f40->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fe68, 0x7ffc06f7feb0 } -0x7ffc06f7feb0->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7feb0 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x7ffc06f7fdcf->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40 } -0x7ffc06f7fe90->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fdcf } -0x7ffc06f7fdcf->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fdcf, 0x7ffc06f7fe90 } -Next token is token 'p' (0x7ffc06f7fe90 'p'Exception caught: cleaning lookahead and stack -0x55b07d870f40->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fe90 } -0x55b07d870f20->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe90 } -0x55b07d870f00->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe90 } -0x55b07d870ee0->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fe90 } -0x7ffc06f7fe90->Object::~Object { 0x7ffc06f7fe90 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -259731,132 +259588,6 @@ stderr: Starting parse Entering state 0 -Stack now 0 -Reading a token -0x7ffc06f7fdcf->Object::Object { } -0x7ffc06f7fe90->Object::Object { 0x7ffc06f7fdcf } 
-0x7ffc06f7fdcf->Object::~Object { 0x7ffc06f7fdcf, 0x7ffc06f7fe90 } -Next token is token 'a' (0x7ffc06f7fe90 'a') -0x7ffc06f7fe00->Object::Object { 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::Object { 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::~Object { 0x7ffc06f7fdb7, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fe90->Object::~Object { 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -Shifting token 'a' (0x7ffc06f7fe00 'a') -0x55b07d870ee0->Object::Object { 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::Object { 0x55b07d870ee0, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fd8f, 0x7ffc06f7fe00 } -0x7ffc06f7fe00->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fe00 } -Entering state 1 -Stack now 0 1 -0x7ffc06f7feb0->Object::Object { 0x55b07d870ee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07d870ee0 'a') --> $$ = nterm item (0x7ffc06f7feb0 'a') -0x55b07d870ee0->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7feb0 } -0x55b07d870ee0->Object::Object { 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::Object { 0x55b07d870ee0, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fe68, 0x7ffc06f7feb0 } -0x7ffc06f7feb0->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7feb0 } -Entering state 10 -Stack now 0 10 -Reading a token -0x7ffc06f7fdcf->Object::Object { 0x55b07d870ee0 } -0x7ffc06f7fe90->Object::Object { 0x55b07d870ee0, 0x7ffc06f7fdcf } -0x7ffc06f7fdcf->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fdcf, 0x7ffc06f7fe90 } -Next token is token 'a' (0x7ffc06f7fe90 'a') -0x7ffc06f7fe00->Object::Object { 0x55b07d870ee0, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::Object { 0x55b07d870ee0, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fdb7, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fe90->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -Shifting token 'a' (0x7ffc06f7fe00 'a') -0x55b07d870f00->Object::Object { 0x55b07d870ee0, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fd8f, 0x7ffc06f7fe00 } -0x7ffc06f7fe00->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe00 } -Entering state 1 -Stack now 0 10 1 -0x7ffc06f7feb0->Object::Object { 0x55b07d870ee0, 0x55b07d870f00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07d870f00 'a') --> $$ = nterm item (0x7ffc06f7feb0 'a') -0x55b07d870f00->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7feb0 } -0x55b07d870f00->Object::Object { 0x55b07d870ee0, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe68, 0x7ffc06f7feb0 } -0x7ffc06f7feb0->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7feb0 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0x7ffc06f7fdcf->Object::Object { 0x55b07d870ee0, 0x55b07d870f00 } -0x7ffc06f7fe90->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fdcf } -0x7ffc06f7fdcf->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fdcf, 0x7ffc06f7fe90 } -Next token is token 'a' (0x7ffc06f7fe90 'a') -0x7ffc06f7fe00->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fdb7, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } 
-0x7ffc06f7fe90->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -Shifting token 'a' (0x7ffc06f7fe00 'a') -0x55b07d870f20->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fd8f, 0x7ffc06f7fe00 } -0x7ffc06f7fe00->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe00 } -Entering state 1 -Stack now 0 10 10 1 -0x7ffc06f7feb0->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07d870f20 'a') --> $$ = nterm item (0x7ffc06f7feb0 'a') -0x55b07d870f20->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7feb0 } -0x55b07d870f20->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe68, 0x7ffc06f7feb0 } -0x7ffc06f7feb0->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7feb0 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x7ffc06f7fdcf->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20 } -0x7ffc06f7fe90->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fdcf } -0x7ffc06f7fdcf->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fdcf, 0x7ffc06f7fe90 } -Next token is token 'a' (0x7ffc06f7fe90 'a') -0x7ffc06f7fe00->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fdb7->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fdb7, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -0x7ffc06f7fe90->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe00, 0x7ffc06f7fe90 } -Shifting token 'a' (0x7ffc06f7fe00 'a') -0x55b07d870f40->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fe00 } -0x7ffc06f7fd8f->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fd8f, 0x7ffc06f7fe00 } -0x7ffc06f7fe00->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fe00 } -Entering state 1 -Stack now 0 10 10 10 1 -0x7ffc06f7feb0->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b07d870f40 'a') --> $$ = nterm item (0x7ffc06f7feb0 'a') -0x55b07d870f40->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7feb0 } -0x55b07d870f40->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7feb0 } -0x7ffc06f7fe68->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fe68, 0x7ffc06f7feb0 } -0x7ffc06f7feb0->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7feb0 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token 
-0x7ffc06f7fdcf->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40 } -0x7ffc06f7fe90->Object::Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fdcf } -0x7ffc06f7fdcf->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fdcf, 0x7ffc06f7fe90 } -Next token is token 'p' (0x7ffc06f7fe90 'p'Exception caught: cleaning lookahead and stack -0x55b07d870f40->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x55b07d870f40, 0x7ffc06f7fe90 } -0x55b07d870f20->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x55b07d870f20, 0x7ffc06f7fe90 } -0x55b07d870f00->Object::~Object { 0x55b07d870ee0, 0x55b07d870f00, 0x7ffc06f7fe90 } -0x55b07d870ee0->Object::~Object { 0x55b07d870ee0, 0x7ffc06f7fe90 } -0x7ffc06f7fe90->Object::~Object { 0x7ffc06f7fe90 } -exception caught: printer -end { } -./c++.at:1363: grep '^exception caught: printer$' stderr -stderr: -stdout: -Starting parse -Entering state 0 Reducing stack 0 by rule 1 (line 64): -> $$ = nterm prog () Entering state 1 @@ -260464,1272 +260195,814 @@ $1 = token '@' () Cleanup: popping nterm prog () 709. cxx-type.at:432: ok -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae stderr: -exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE +stdout: +./c++.at:856: $PREPARSER ./input +stderr: +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +stdout: +./c++.at:856: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1065: $PREPARSER ./input < in +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:1555: $PREPARSER ./test +./c++.at:1360: $PREPARSER ./input aaaas stderr: -./c++.at:1363: $PREPARSER ./input aaaaT +./c++.at:1065: $PREPARSER ./input < in +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +713. cxx-type.at:455: testing GLR: Verbose messages, resolve ambiguity, impure, no locations ... stderr: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./c++.at:1363: $PREPARSER ./input aaaaR +./cxx-type.at:456: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o types.c types.y +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaal stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +exception caught: yylex +./c++.at:1065: $PREPARSER ./input < in +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -714. glr-regression.at:205: testing Badly Collapsed GLR States: glr.c ... 
-./glr-regression.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.c glr-regr1.y -stdout: -./cxx-type.at:441: $PREPARSER ./types test-input +error: invalid character +./c++.at:1360: $PREPARSER ./input i +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -17.5: syntax error -./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:1360: $PREPARSER ./input aaaap stderr: -./glr-regression.at:205: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr1 glr-regr1.c $LIBS -./cxx-type.at:441: $PREPARSER ./types -p test-input -stdout: stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +types.y:77.8-37: warning: unset value: $$ [-Wother] +types.y: warning: 1 reduce/reduce conflict [-Wconflicts-rr] +types.y: note: rerun with option '-Wcounterexamples' to generate conflict counterexamples +./cxx-type.at:456: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o types types.c $LIBS +./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: -./c++.at:1555: ./check -stdout: Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 71): --> $$ = nterm prog (1.1: ) -Entering state 1 -Reading a token -Next token is token ID (3.0: ) -Shifting token ID (3.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.0: ) --> $$ = nterm expr (3.0: ) -Entering state 8 -Reading a token -Next token is token '+' (3.2: ) -Shifting token '+' (3.2: ) -Entering state 15 -Reading a token -Next token is token ID (3.4: ) -Shifting token ID (3.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.4: ) --> $$ = nterm expr (3.4: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (3.0: ) - $2 = token '+' (3.2: ) - $3 = nterm expr (3.4: ) --> $$ = nterm expr (3.0-4: ) -Entering state 8 -Reading a token -Next token is token ';' (3.5: ) -Shifting token ';' (3.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (3.0-4: ) - $2 = token ';' (3.5: ) --> $$ = nterm stmt (3.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1: ) - $2 = nterm stmt (3.0-5: ) --> $$ = nterm prog (1.1-3.5: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (5.0: ) -Shifting token TYPENAME (5.0: ) -Entering state 4 +Stack now 0 Reading a token -Next token is token ID (5.2: ) -Shifting token ID (5.2: ) +0x55bebe153b40->Object::Object { } +Next token is token 'a' (0x55bebe153b40 'a') +Shifting token 'a' (0x55bebe153b40 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55bebe153b40 'a') +-> $$ = nterm item (0x55bebe153b40 'a') Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (5.2: ) --> $$ = nterm declarator (5.2: ) -Entering state 13 -Reading a token -Next token is token ';' (5.3: ) -Shifting token ';' (5.3: ) -Entering state 23 -Reducing stack 0 by rule 11 (line 97): - $1 = token TYPENAME (5.0: ) - $2 = nterm declarator (5.2: ) - $3 = token ';' (5.3: ) --> $$ = nterm decl (5.0-3: ) -Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (5.0-3: ) --> $$ = nterm stmt (5.0-3: ) -Entering state 7 -Reducing stack 0 
by rule 2 (line 72): - $1 = nterm prog (1.1-3.5: ) - $2 = nterm stmt (5.0-3: ) --> $$ = nterm prog (1.1-5.3: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (7.0: ) -Shifting token TYPENAME (7.0: ) -Entering state 4 +Stack now 0 11 Reading a token -Next token is token ID (7.2: ) -Shifting token ID (7.2: ) +0x55bebe153b90->Object::Object { 0x55bebe153b40 } +Next token is token 'a' (0x55bebe153b90 'a') +Shifting token 'a' (0x55bebe153b90 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55bebe153b90 'a') +-> $$ = nterm item (0x55bebe153b90 'a') Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (7.2: ) --> $$ = nterm declarator (7.2: ) -Entering state 13 -Reading a token -Next token is token '=' (7.4: ) -Shifting token '=' (7.4: ) -Entering state 22 -Reading a token -Next token is token ID (7.6: ) -Shifting token ID (7.6: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (7.6: ) --> $$ = nterm expr (7.6: ) -Entering state 29 -Reading a token -Next token is token ';' (7.7: ) -Shifting token ';' (7.7: ) -Entering state 30 -Reducing stack 0 by rule 12 (line 99): - $1 = token TYPENAME (7.0: ) - $2 = nterm declarator (7.2: ) - $3 = token '=' (7.4: ) - $4 = nterm expr (7.6: ) - $5 = token ';' (7.7: ) --> $$ = nterm decl (7.0-7: ) -Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (7.0-7: ) --> $$ = nterm stmt (7.0-7: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-5.3: ) - $2 = nterm stmt (7.0-7: ) --> $$ = nterm prog (1.1-7.7: ) -Entering state 1 -Reading a token -Next token is token ID (9.0: ) -Shifting token ID (9.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.0: ) --> $$ = nterm expr (9.0: ) -Entering state 8 -Reading a token -Next token is token '=' (9.2: ) -Shifting token '=' (9.2: ) -Entering state 14 -Reading a token -Next token is token ID (9.4: ) -Shifting token ID (9.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.4: ) --> $$ = nterm expr (9.4: ) -Entering state 24 -Reading a token -Next token is token ';' (9.5: ) -Reducing stack 0 by rule 10 (line 94): - $1 = nterm expr (9.0: ) - $2 = token '=' (9.2: ) - $3 = nterm expr (9.4: ) --> $$ = nterm expr (9.0-4: ) -Entering state 8 -Next token is token ';' (9.5: ) -Shifting token ';' (9.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (9.0-4: ) - $2 = token ';' (9.5: ) --> $$ = nterm stmt (9.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-7.7: ) - $2 = nterm stmt (9.0-5: ) --> $$ = nterm prog (1.1-9.5: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (11.0: ) -Shifting token TYPENAME (11.0: ) -Entering state 4 -Reading a token -Next token is token '(' (11.2: ) -Shifting token '(' (11.2: ) -Entering state 12 -Reading a token -Next token is token ID (11.3: ) -Shifting token ID (11.3: ) -Entering state 18 -Reading a token -Next token is token ')' (11.4: ) -Stack 0 Entering state 18 -Next token is token ')' (11.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. 
-Stack 0 Entering state 20 -Next token is token ')' (11.4: ) -Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -On stack 0, shifting token ')' (11.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (11.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '+' (11.6: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '+' (11.6: ) -Stack 1 dies. -Removing dead stacks. -On stack 0, shifting token '+' (11.6: ) -Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (11.3: ) --> $$ = nterm expr (11.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (11.0: ) - $2 = token '(' (11.2: ) - $3 = nterm expr (11.3: ) - $4 = token ')' (11.4: ) --> $$ = nterm expr (11.0-4: ) -Returning to deterministic operation. -Entering state 15 -Reading a token -Next token is token ID (11.8: ) -Shifting token ID (11.8: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (11.8: ) --> $$ = nterm expr (11.8: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (11.0-4: ) - $2 = token '+' (11.6: ) - $3 = nterm expr (11.8: ) --> $$ = nterm expr (11.0-8: ) -Entering state 8 -Reading a token -Next token is token ';' (11.9: ) -Shifting token ';' (11.9: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (11.0-8: ) - $2 = token ';' (11.9: ) --> $$ = nterm stmt (11.0-9: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-9.5: ) - $2 = nterm stmt (11.0-9: ) --> $$ = nterm prog (1.1-11.9: ) -Entering state 1 -Reading a token -Next token is token TYPENAME (13.0: ) -Shifting token TYPENAME (13.0: ) -Entering state 4 -Reading a token -Next token is token '(' (13.2: ) -Shifting token '(' (13.2: ) -Entering state 12 -Reading a token -Next token is token ID (13.3: ) -Shifting token ID (13.3: ) -Entering state 18 -Reading a token -Next token is token ')' (13.4: ) -Stack 0 Entering state 18 -Next token is token ')' (13.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (13.4: ) -Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -On stack 0, shifting token ')' (13.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (13.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token ';' (13.5: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token ';' (13.5: ) -On stack 0, shifting token ';' (13.5: ) -Stack 0 now in state 16 -On stack 1, shifting token ';' (13.5: ) -Stack 1 now in state 23 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME (15.0: ) -Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. 
-Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. -On stack 0, shifting token TYPENAME (15.0: ) -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (13.3: ) --> $$ = nterm declarator (13.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (13.2: ) - $2 = nterm declarator (13.3: ) - $3 = token ')' (13.4: ) --> $$ = nterm declarator (13.2-4: ) -Reducing stack -1 by rule 11 (line 97): - $1 = token TYPENAME (13.0: ) - $2 = nterm declarator (13.2-4: ) - $3 = token ';' (13.5: ) --> $$ = nterm decl (13.0-5: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (13.0-5: ) --> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (13.3: ) --> $$ = nterm expr (13.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (13.0: ) - $2 = token '(' (13.2: ) - $3 = nterm expr (13.3: ) - $4 = token ')' (13.4: ) --> $$ = nterm expr (13.0-4: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (13.0-4: ) - $2 = token ';' (13.5: ) --> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-11.9: ) - $2 = nterm stmt (13.0-5: ) --> $$ = nterm prog (1.1-13.5: ) -Returning to deterministic operation. -Entering state 4 -Reading a token -Next token is token '(' (15.2: ) -Shifting token '(' (15.2: ) -Entering state 12 -Reading a token -Next token is token ID (15.3: ) -Shifting token ID (15.3: ) -Entering state 18 -Reading a token -Next token is token ')' (15.4: ) -Stack 0 Entering state 18 -Next token is token ')' (15.4: ) -Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. -Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. -Stack 0 Entering state 20 -Next token is token ')' (15.4: ) -Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -On stack 0, shifting token ')' (15.4: ) -Stack 0 now in state 27 -On stack 1, shifting token ')' (15.4: ) -Stack 1 now in state 28 -Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. -Stack 0 Entering state 8 -Reading a token -Next token is token '=' (15.6: ) -Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. -Stack 1 Entering state 13 -Next token is token '=' (15.6: ) -On stack 0, shifting token '=' (15.6: ) -Stack 0 now in state 14 -On stack 1, shifting token '=' (15.6: ) -Stack 1 now in state 22 -Stack 0 Entering state 14 -Reading a token -Next token is token ID (15.8: ) -Stack 1 Entering state 22 -Next token is token ID (15.8: ) -On stack 0, shifting token ID (15.8: ) -Stack 0 now in state 5 -On stack 1, shifting token ID (15.8: ) -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token '+' (15.10: ) -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. 
-Stack 1 Entering state 29 -Next token is token '+' (15.10: ) -On stack 0, shifting token '+' (15.10: ) -Stack 0 now in state 15 -On stack 1, shifting token '+' (15.10: ) -Stack 1 now in state 15 -Stack 0 Entering state 15 -Reading a token -Next token is token ID (15.12: ) -Stack 1 Entering state 15 -Next token is token ID (15.12: ) -On stack 0, shifting token ID (15.12: ) -Stack 0 now in state 5 -On stack 1, shifting token ID (15.12: ) -Stack 1 now in state 5 -Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. -Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. -Stack 0 Entering state 24 -Reading a token -Next token is token ';' (15.13: ) -Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. -Stack 0 Entering state 8 -Next token is token ';' (15.13: ) -Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. -Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. -Stack 1 Entering state 29 -Next token is token ';' (15.13: ) -On stack 0, shifting token ';' (15.13: ) -Stack 0 now in state 16 -On stack 1, shifting token ';' (15.13: ) -Stack 1 now in state 30 -Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. -Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. -Stack 0 Entering state 1 -Reading a token -Next token is token TYPENAME (17.0: ) -Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. -Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. -Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. -Merging stack 1 into stack 0. -Removing dead stacks. 
-On stack 0, shifting token TYPENAME (17.0: ) -Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (15.3: ) --> $$ = nterm declarator (15.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (15.2: ) - $2 = nterm declarator (15.3: ) - $3 = token ')' (15.4: ) --> $$ = nterm declarator (15.2-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 12 (line 99): - $1 = token TYPENAME (15.0: ) - $2 = nterm declarator (15.2-4: ) - $3 = token '=' (15.6: ) - $4 = nterm expr (15.8-12: ) - $5 = token ';' (15.13: ) --> $$ = nterm decl (15.0-13: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (15.0-13: ) --> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.3: ) --> $$ = nterm expr (15.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (15.0: ) - $2 = token '(' (15.2: ) - $3 = nterm expr (15.3: ) - $4 = token ')' (15.4: ) --> $$ = nterm expr (15.0-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 10 (line 94): - $1 = nterm expr (15.0-4: ) - $2 = token '=' (15.6: ) - $3 = nterm expr (15.8-12: ) --> $$ = nterm expr (15.0-12: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (15.0-12: ) - $2 = token ';' (15.13: ) --> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-13.5: ) - $2 = nterm stmt (15.0-13: ) --> $$ = nterm prog (1.1-15.13: ) -Returning to deterministic operation. 
-Entering state 4 -Reading a token -Next token is token '(' (17.2: ) -Shifting token '(' (17.2: ) -Entering state 12 -Reading a token -Next token is token ID (17.3: ) -Shifting token ID (17.3: ) -Entering state 18 -Reading a token -Next token is token ID (17.5: ) -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (17.3: ) --> $$ = nterm expr (17.3: ) -Entering state 20 -Next token is token ID (17.5: ) -17.5: syntax error -Error: popping nterm expr (17.3: ) -Error: popping token '(' (17.2: ) -Error: popping token TYPENAME (17.0: ) -Shifting token error (17.0-5: ) -Entering state 3 -Next token is token ID (17.5: ) -Error: discarding token ID (17.5: ) -Reading a token -Next token is token ')' (17.6: ) -Error: discarding token ')' (17.6: ) -Reading a token -Next token is token '=' (17.8: ) -Error: discarding token '=' (17.8: ) -Reading a token -Next token is token ID (17.10: ) -Error: discarding token ID (17.10: ) +Stack now 0 11 11 Reading a token -Next token is token '+' (17.12: ) -Error: discarding token '+' (17.12: ) +0x55bebe153be0->Object::Object { 0x55bebe153b40, 0x55bebe153b90 } +Next token is token 'a' (0x55bebe153be0 'a') +Shifting token 'a' (0x55bebe153be0 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55bebe153be0 'a') +-> $$ = nterm item (0x55bebe153be0 'a') +Entering state 11 +Stack now 0 11 11 11 Reading a token -Next token is token ID (17.14: ) -Error: discarding token ID (17.14: ) +0x55bebe153c30->Object::Object { 0x55bebe153b40, 0x55bebe153b90, 0x55bebe153be0 } +Next token is token 'a' (0x55bebe153c30 'a') +Shifting token 'a' (0x55bebe153c30 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55bebe153c30 'a') +-> $$ = nterm item (0x55bebe153c30 'a') +Entering state 11 +Stack now 0 11 11 11 11 Reading a token -Next token is token ';' (17.15: ) -Entering state 3 -Next token is token ';' (17.15: ) -Shifting token ';' (17.15: ) -Entering state 10 -Reducing stack 0 by rule 5 (line 86): - $1 = token error (17.0-14: ) - $2 = token ';' (17.15: ) --> $$ = nterm stmt (17.0-15: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-15.13: ) - $2 = nterm stmt (17.0-15: ) --> $$ = nterm prog (1.1-17.15: ) -Entering state 1 +0x55bebe153c80->Object::Object { 0x55bebe153b40, 0x55bebe153b90, 0x55bebe153be0, 0x55bebe153c30 } +Next token is token 'p' (0x55bebe153c80 'p'Exception caught: cleaning lookahead and stack +0x55bebe153c80->Object::~Object { 0x55bebe153b40, 0x55bebe153b90, 0x55bebe153be0, 0x55bebe153c30, 0x55bebe153c80 } +0x55bebe153c30->Object::~Object { 0x55bebe153b40, 0x55bebe153b90, 0x55bebe153be0, 0x55bebe153c30 } +0x55bebe153be0->Object::~Object { 0x55bebe153b40, 0x55bebe153b90, 0x55bebe153be0 } +0x55bebe153b90->Object::~Object { 0x55bebe153b40, 0x55bebe153b90 } +0x55bebe153b40->Object::~Object { 0x55bebe153b40 } +exception caught: printer +end { } +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 Reading a token -Next token is token ID (19.0: ) -Shifting token ID (19.0: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.0: ) --> $$ = nterm expr (19.0: ) -Entering state 8 +0x55bebe153b40->Object::Object { } +Next token is token 'a' (0x55bebe153b40 'a') +Shifting token 'a' (0x55bebe153b40 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55bebe153b40 'a') +-> $$ = nterm item 
(0x55bebe153b40 'a') +Entering state 11 +Stack now 0 11 Reading a token -Next token is token '+' (19.2: ) -Shifting token '+' (19.2: ) -Entering state 15 +0x55bebe153b90->Object::Object { 0x55bebe153b40 } +Next token is token 'a' (0x55bebe153b90 'a') +Shifting token 'a' (0x55bebe153b90 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55bebe153b90 'a') +-> $$ = nterm item (0x55bebe153b90 'a') +Entering state 11 +Stack now 0 11 11 Reading a token -Next token is token ID (19.4: ) -Shifting token ID (19.4: ) -Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.4: ) --> $$ = nterm expr (19.4: ) -Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (19.0: ) - $2 = token '+' (19.2: ) - $3 = nterm expr (19.4: ) --> $$ = nterm expr (19.0-4: ) -Entering state 8 +0x55bebe153be0->Object::Object { 0x55bebe153b40, 0x55bebe153b90 } +Next token is token 'a' (0x55bebe153be0 'a') +Shifting token 'a' (0x55bebe153be0 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55bebe153be0 'a') +-> $$ = nterm item (0x55bebe153be0 'a') +Entering state 11 +Stack now 0 11 11 11 Reading a token -Next token is token ';' (19.5: ) -Shifting token ';' (19.5: ) -Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (19.0-4: ) - $2 = token ';' (19.5: ) --> $$ = nterm stmt (19.0-5: ) -Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-17.15: ) - $2 = nterm stmt (19.0-5: ) --> $$ = nterm prog (1.1-19.5: ) -Entering state 1 +0x55bebe153c30->Object::Object { 0x55bebe153b40, 0x55bebe153b90, 0x55bebe153be0 } +Next token is token 'a' (0x55bebe153c30 'a') +Shifting token 'a' (0x55bebe153c30 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55bebe153c30 'a') +-> $$ = nterm item (0x55bebe153c30 'a') +Entering state 11 +Stack now 0 11 11 11 11 Reading a token -Next token is token '@' (21.0: ) -Shifting token '@' (21.0: ) -Entering state 6 -Reducing stack 0 by rule 6 (line 87): - $1 = token '@' (21.0: ) -Cleanup: popping nterm prog (1.1-19.5: ) -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +0x55bebe153c80->Object::Object { 0x55bebe153b40, 0x55bebe153b90, 0x55bebe153be0, 0x55bebe153c30 } +Next token is token 'p' (0x55bebe153c80 'p'Exception caught: cleaning lookahead and stack +0x55bebe153c80->Object::~Object { 0x55bebe153b40, 0x55bebe153b90, 0x55bebe153be0, 0x55bebe153c30, 0x55bebe153c80 } +0x55bebe153c30->Object::~Object { 0x55bebe153b40, 0x55bebe153b90, 0x55bebe153be0, 0x55bebe153c30 } +0x55bebe153be0->Object::~Object { 0x55bebe153b40, 0x55bebe153b90, 0x55bebe153be0 } +0x55bebe153b90->Object::~Object { 0x55bebe153b40, 0x55bebe153b90 } +0x55bebe153b40->Object::~Object { 0x55bebe153b40 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +./c++.at:1360: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:1363: $PREPARSER ./input aaaas +stderr: +stdout: +./c++.at:1360: $PREPARSER ./input aaaaE ./cxx-type.at:447: $PREPARSER ./types test-input -./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: syntax error, unexpected end of file, 
expecting 'a' stderr: syntax error stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: reduction ./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:447: $PREPARSER ./types -p test-input +stderr: Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 71): --> $$ = nterm prog (1.1: ) +Reducing stack 0 by rule 1 (line 64): +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token ID (3.0: ) -Shifting token ID (3.0: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.0: ) --> $$ = nterm expr (3.0: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token '+' (3.2: ) -Shifting token '+' (3.2: ) +Next token is token '+' () +Shifting token '+' () Entering state 15 Reading a token -Next token is token ID (3.4: ) -Shifting token ID (3.4: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (3.4: ) --> $$ = nterm expr (3.4: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (3.0: ) - $2 = token '+' (3.2: ) - $3 = nterm expr (3.4: ) --> $$ = nterm expr (3.0-4: ) +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token ';' (3.5: ) -Shifting token ';' (3.5: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (3.0-4: ) - $2 = token ';' (3.5: ) --> $$ = nterm stmt (3.0-5: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1: ) - $2 = nterm stmt (3.0-5: ) --> $$ = nterm prog (1.1-3.5: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token TYPENAME (5.0: ) -Shifting token TYPENAME (5.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token ID (5.2: ) -Shifting token ID (5.2: ) +Next token is token ID () +Shifting token ID () Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (5.2: ) --> $$ = nterm declarator (5.2: ) +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () Entering state 13 Reading a token -Next token is token ';' (5.3: ) -Shifting token ';' (5.3: ) +Next token is token ';' () +Shifting token ';' () Entering state 23 -Reducing stack 0 by rule 11 (line 97): - $1 = token TYPENAME (5.0: ) - $2 = nterm declarator (5.2: ) - $3 = token ';' (5.3: ) --> $$ = nterm decl (5.0-3: ) +Reducing stack 0 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (5.0-3: ) --> $$ = nterm stmt (5.0-3: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-3.5: ) - $2 = nterm stmt (5.0-3: ) --> 
$$ = nterm prog (1.1-5.3: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token TYPENAME (7.0: ) -Shifting token TYPENAME (7.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token ID (7.2: ) -Shifting token ID (7.2: ) +Next token is token ID () +Shifting token ID () Entering state 11 -Reducing stack 0 by rule 13 (line 104): - $1 = token ID (7.2: ) --> $$ = nterm declarator (7.2: ) +Reducing stack 0 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () Entering state 13 Reading a token -Next token is token '=' (7.4: ) -Shifting token '=' (7.4: ) +Next token is token '=' () +Shifting token '=' () Entering state 22 Reading a token -Next token is token ID (7.6: ) -Shifting token ID (7.6: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (7.6: ) --> $$ = nterm expr (7.6: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 29 Reading a token -Next token is token ';' (7.7: ) -Shifting token ';' (7.7: ) +Next token is token ';' () +Shifting token ';' () Entering state 30 -Reducing stack 0 by rule 12 (line 99): - $1 = token TYPENAME (7.0: ) - $2 = nterm declarator (7.2: ) - $3 = token '=' (7.4: ) - $4 = nterm expr (7.6: ) - $5 = token ';' (7.7: ) --> $$ = nterm decl (7.0-7: ) +Reducing stack 0 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () Entering state 9 -Reducing stack 0 by rule 4 (line 85): - $1 = nterm decl (7.0-7: ) --> $$ = nterm stmt (7.0-7: ) +Reducing stack 0 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-5.3: ) - $2 = nterm stmt (7.0-7: ) --> $$ = nterm prog (1.1-7.7: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token ID (9.0: ) -Shifting token ID (9.0: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.0: ) --> $$ = nterm expr (9.0: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token '=' (9.2: ) -Shifting token '=' (9.2: ) +Next token is token '=' () +Shifting token '=' () Entering state 14 Reading a token -Next token is token ID (9.4: ) -Shifting token ID (9.4: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (9.4: ) --> $$ = nterm expr (9.4: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 24 Reading a token -Next token is token ';' (9.5: ) -Reducing stack 0 by rule 10 (line 94): - $1 = nterm expr (9.0: ) - $2 = token '=' (9.2: ) - $3 = nterm expr (9.4: ) --> $$ = nterm expr (9.0-4: ) +Next token is token ';' () +Reducing stack 0 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 -Next token is token ';' (9.5: ) -Shifting token ';' (9.5: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (9.0-4: ) - $2 = token ';' (9.5: ) --> $$ = nterm stmt (9.0-5: 
) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-7.7: ) - $2 = nterm stmt (9.0-5: ) --> $$ = nterm prog (1.1-9.5: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token TYPENAME (11.0: ) -Shifting token TYPENAME (11.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token '(' (11.2: ) -Shifting token '(' (11.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (11.3: ) -Shifting token ID (11.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ')' (11.4: ) +Next token is token ')' () Stack 0 Entering state 18 -Next token is token ')' (11.4: ) +Next token is token ')' () Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' (11.4: ) +Next token is token ')' () Stack 1 Entering state 21 -Next token is token ')' (11.4: ) -On stack 0, shifting token ')' (11.4: ) +Next token is token ')' () +On stack 0, shifting token ')' () Stack 0 now in state 27 -On stack 1, shifting token ')' (11.4: ) +On stack 1, shifting token ')' () Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '+' (11.6: ) +Next token is token '+' () Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '+' (11.6: ) +Next token is token '+' () Stack 1 dies. Removing dead stacks. -On stack 0, shifting token '+' (11.6: ) +On stack 0, shifting token '+' () Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (11.3: ) --> $$ = nterm expr (11.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (11.0: ) - $2 = token '(' (11.2: ) - $3 = nterm expr (11.3: ) - $4 = token ')' (11.4: ) --> $$ = nterm expr (11.0-4: ) +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () Returning to deterministic operation. 
Entering state 15 Reading a token -Next token is token ID (11.8: ) -Shifting token ID (11.8: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (11.8: ) --> $$ = nterm expr (11.8: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (11.0-4: ) - $2 = token '+' (11.6: ) - $3 = nterm expr (11.8: ) --> $$ = nterm expr (11.0-8: ) +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token ';' (11.9: ) -Shifting token ';' (11.9: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (11.0-8: ) - $2 = token ';' (11.9: ) --> $$ = nterm stmt (11.0-9: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-9.5: ) - $2 = nterm stmt (11.0-9: ) --> $$ = nterm prog (1.1-11.9: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token TYPENAME (13.0: ) -Shifting token TYPENAME (13.0: ) +Next token is token TYPENAME () +Shifting token TYPENAME () Entering state 4 Reading a token -Next token is token '(' (13.2: ) -Shifting token '(' (13.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (13.3: ) -Shifting token ID (13.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ')' (13.4: ) +Next token is token ')' () Stack 0 Entering state 18 -Next token is token ')' (13.4: ) +Next token is token ')' () Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' (13.4: ) +Next token is token ')' () Stack 1 Entering state 21 -Next token is token ')' (13.4: ) -On stack 0, shifting token ')' (13.4: ) +Next token is token ')' () +On stack 0, shifting token ')' () Stack 0 now in state 27 -On stack 1, shifting token ')' (13.4: ) +On stack 1, shifting token ')' () Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token ';' (13.5: ) +Next token is token ';' () Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token ';' (13.5: ) -On stack 0, shifting token ';' (13.5: ) +Next token is token ';' () +On stack 0, shifting token ';' () Stack 0 now in state 16 -On stack 1, shifting token ';' (13.5: ) +On stack 1, shifting token ';' () Stack 1 now in state 23 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. 
+Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME (15.0: ) +Next token is token TYPENAME () Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. +Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. -On stack 0, shifting token TYPENAME (15.0: ) +On stack 0, shifting token TYPENAME () Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (13.3: ) --> $$ = nterm declarator (13.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (13.2: ) - $2 = nterm declarator (13.3: ) - $3 = token ')' (13.4: ) --> $$ = nterm declarator (13.2-4: ) -Reducing stack -1 by rule 11 (line 97): - $1 = token TYPENAME (13.0: ) - $2 = nterm declarator (13.2-4: ) - $3 = token ';' (13.5: ) --> $$ = nterm decl (13.0-5: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (13.0-5: ) --> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (13.3: ) --> $$ = nterm expr (13.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (13.0: ) - $2 = token '(' (13.2: ) - $3 = nterm expr (13.3: ) - $4 = token ')' (13.4: ) --> $$ = nterm expr (13.0-4: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (13.0-4: ) - $2 = token ';' (13.5: ) --> $$ = nterm stmt (13.0-5: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-11.9: ) - $2 = nterm stmt (13.0-5: ) --> $$ = nterm prog (1.1-13.5: ) +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 11 (line 87): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Reducing stack -1 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' (15.2: ) -Shifting token '(' (15.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (15.3: ) -Shifting token ID (15.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ')' (15.4: ) +Next token is token ')' () Stack 0 Entering state 18 -Next token is token ')' (15.4: ) +Next token is token ')' () Splitting off stack 1 from 0. 
-Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Next token is token ')' () +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' (15.4: ) +Next token is token ')' () Stack 1 Entering state 21 -Next token is token ')' (15.4: ) -On stack 0, shifting token ')' (15.4: ) +Next token is token ')' () +On stack 0, shifting token ')' () Stack 0 now in state 27 -On stack 1, shifting token ')' (15.4: ) +On stack 1, shifting token ')' () Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '=' (15.6: ) +Next token is token '=' () Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '=' (15.6: ) -On stack 0, shifting token '=' (15.6: ) +Next token is token '=' () +On stack 0, shifting token '=' () Stack 0 now in state 14 -On stack 1, shifting token '=' (15.6: ) +On stack 1, shifting token '=' () Stack 1 now in state 22 Stack 0 Entering state 14 Reading a token -Next token is token ID (15.8: ) +Next token is token ID () Stack 1 Entering state 22 -Next token is token ID (15.8: ) -On stack 0, shifting token ID (15.8: ) +Next token is token ID () +On stack 0, shifting token ID () Stack 0 now in state 5 -On stack 1, shifting token ID (15.8: ) +On stack 1, shifting token ID () Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token '+' (15.10: ) +Next token is token '+' () Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token '+' (15.10: ) -On stack 0, shifting token '+' (15.10: ) +Next token is token '+' () +On stack 0, shifting token '+' () Stack 0 now in state 15 -On stack 1, shifting token '+' (15.10: ) +On stack 1, shifting token '+' () Stack 1 now in state 15 Stack 0 Entering state 15 Reading a token -Next token is token ID (15.12: ) +Next token is token ID () Stack 1 Entering state 15 -Next token is token ID (15.12: ) -On stack 0, shifting token ID (15.12: ) +Next token is token ID () +On stack 0, shifting token ID () Stack 0 now in state 5 -On stack 1, shifting token ID (15.12: ) +On stack 1, shifting token ID () Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. +Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. +Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token ';' (15.13: ) -Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. 
+Next token is token ';' () +Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. Stack 0 Entering state 8 -Next token is token ';' (15.13: ) +Next token is token ';' () Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. +Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. +Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token ';' (15.13: ) -On stack 0, shifting token ';' (15.13: ) +Next token is token ';' () +On stack 0, shifting token ';' () Stack 0 now in state 16 -On stack 1, shifting token ';' (15.13: ) +On stack 1, shifting token ';' () Stack 1 now in state 30 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME (17.0: ) +Next token is token TYPENAME () Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. +Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. 
-On stack 0, shifting token TYPENAME (17.0: ) +On stack 0, shifting token TYPENAME () Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 104): - $1 = token ID (15.3: ) --> $$ = nterm declarator (15.3: ) -Reducing stack -1 by rule 14 (line 105): - $1 = token '(' (15.2: ) - $2 = nterm declarator (15.3: ) - $3 = token ')' (15.4: ) --> $$ = nterm declarator (15.2-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 12 (line 99): - $1 = token TYPENAME (15.0: ) - $2 = nterm declarator (15.2-4: ) - $3 = token '=' (15.6: ) - $4 = nterm expr (15.8-12: ) - $5 = token ';' (15.13: ) --> $$ = nterm decl (15.0-13: ) -Reducing stack -1 by rule 4 (line 85): - $1 = nterm decl (15.0-13: ) --> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.3: ) --> $$ = nterm expr (15.3: ) -Reducing stack -1 by rule 8 (line 91): - $1 = token TYPENAME (15.0: ) - $2 = token '(' (15.2: ) - $3 = nterm expr (15.3: ) - $4 = token ')' (15.4: ) --> $$ = nterm expr (15.0-4: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.8: ) --> $$ = nterm expr (15.8: ) -Reducing stack -1 by rule 7 (line 90): - $1 = token ID (15.12: ) --> $$ = nterm expr (15.12: ) -Reducing stack -1 by rule 9 (line 93): - $1 = nterm expr (15.8: ) - $2 = token '+' (15.10: ) - $3 = nterm expr (15.12: ) --> $$ = nterm expr (15.8-12: ) -Reducing stack -1 by rule 10 (line 94): - $1 = nterm expr (15.0-4: ) - $2 = token '=' (15.6: ) - $3 = nterm expr (15.8-12: ) --> $$ = nterm expr (15.0-12: ) -Reducing stack -1 by rule 3 (line 84): - $1 = nterm expr (15.0-12: ) - $2 = token ';' (15.13: ) --> $$ = nterm stmt (15.0-13: ) -Reducing stack -1 by rule 2 (line 72): - $1 = nterm prog (1.1-13.5: ) - $2 = nterm stmt (15.0-13: ) --> $$ = nterm prog (1.1-15.13: ) +Reducing stack -1 by rule 13 (line 94): + $1 = token ID () +-> $$ = nterm declarator () +Reducing stack -1 by rule 14 (line 95): + $1 = token '(' () + $2 = nterm declarator () + $3 = token ')' () +-> $$ = nterm declarator () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 12 (line 89): + $1 = token TYPENAME () + $2 = nterm declarator () + $3 = token '=' () + $4 = nterm expr () + $5 = token ';' () +-> $$ = nterm decl () +Reducing stack -1 by rule 4 (line 75): + $1 = nterm decl () +-> $$ = nterm stmt () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 8 (line 81): + $1 = token TYPENAME () + $2 = token '(' () + $3 = nterm expr () + $4 = token ')' () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () +Reducing stack -1 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 10 (line 84): + $1 = nterm expr () + $2 = token '=' () + $3 = nterm expr () +-> $$ = nterm expr () +Reducing stack -1 by rule 3 (line 74): + $1 = 
nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () +Reducing stack -1 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' (17.2: ) -Shifting token '(' (17.2: ) +Next token is token '(' () +Shifting token '(' () Entering state 12 Reading a token -Next token is token ID (17.3: ) -Shifting token ID (17.3: ) +Next token is token ID () +Shifting token ID () Entering state 18 Reading a token -Next token is token ID (17.5: ) -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (17.3: ) --> $$ = nterm expr (17.3: ) +Next token is token ID () +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 20 -Next token is token ID (17.5: ) -17.5: syntax error -Error: popping nterm expr (17.3: ) -Error: popping token '(' (17.2: ) -Error: popping token TYPENAME (17.0: ) -Shifting token error (17.0-5: ) +Next token is token ID () +syntax error +Error: popping nterm expr () +Error: popping token '(' () +Error: popping token TYPENAME () +Shifting token error () Entering state 3 -Next token is token ID (17.5: ) -Error: discarding token ID (17.5: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token ')' (17.6: ) -Error: discarding token ')' (17.6: ) +Next token is token ')' () +Error: discarding token ')' () Reading a token -Next token is token '=' (17.8: ) -Error: discarding token '=' (17.8: ) +Next token is token '=' () +Error: discarding token '=' () Reading a token -Next token is token ID (17.10: ) -Error: discarding token ID (17.10: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token '+' (17.12: ) -Error: discarding token '+' (17.12: ) +Next token is token '+' () +Error: discarding token '+' () Reading a token -Next token is token ID (17.14: ) -Error: discarding token ID (17.14: ) +Next token is token ID () +Error: discarding token ID () Reading a token -Next token is token ';' (17.15: ) +Next token is token ';' () Entering state 3 -Next token is token ';' (17.15: ) -Shifting token ';' (17.15: ) +Next token is token ';' () +Shifting token ';' () Entering state 10 -Reducing stack 0 by rule 5 (line 86): - $1 = token error (17.0-14: ) - $2 = token ';' (17.15: ) --> $$ = nterm stmt (17.0-15: ) +Reducing stack 0 by rule 5 (line 76): + $1 = token error () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-15.13: ) - $2 = nterm stmt (17.0-15: ) --> $$ = nterm prog (1.1-17.15: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token ID (19.0: ) -Shifting token ID (19.0: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.0: ) --> $$ = nterm expr (19.0: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token '+' (19.2: ) -Shifting token '+' (19.2: ) +Next token is token '+' () +Shifting token '+' () Entering state 15 Reading a token -Next token is token ID (19.4: ) -Shifting token ID (19.4: ) +Next token is token ID () +Shifting token ID () Entering state 5 -Reducing stack 0 by rule 7 (line 90): - $1 = token ID (19.4: ) --> $$ = nterm expr (19.4: ) +Reducing stack 0 by rule 7 (line 80): + $1 = token ID () +-> $$ = nterm expr () 
Entering state 25 -Reducing stack 0 by rule 9 (line 93): - $1 = nterm expr (19.0: ) - $2 = token '+' (19.2: ) - $3 = nterm expr (19.4: ) --> $$ = nterm expr (19.0-4: ) +Reducing stack 0 by rule 9 (line 83): + $1 = nterm expr () + $2 = token '+' () + $3 = nterm expr () +-> $$ = nterm expr () Entering state 8 Reading a token -Next token is token ';' (19.5: ) -Shifting token ';' (19.5: ) +Next token is token ';' () +Shifting token ';' () Entering state 16 -Reducing stack 0 by rule 3 (line 84): - $1 = nterm expr (19.0-4: ) - $2 = token ';' (19.5: ) --> $$ = nterm stmt (19.0-5: ) +Reducing stack 0 by rule 3 (line 74): + $1 = nterm expr () + $2 = token ';' () +-> $$ = nterm stmt () Entering state 7 -Reducing stack 0 by rule 2 (line 72): - $1 = nterm prog (1.1-17.15: ) - $2 = nterm stmt (19.0-5: ) --> $$ = nterm prog (1.1-19.5: ) +Reducing stack 0 by rule 2 (line 65): + $1 = nterm prog () + $2 = nterm stmt () +-> $$ = nterm prog () Entering state 1 Reading a token -Next token is token '@' (21.0: ) -Shifting token '@' (21.0: ) +Next token is token '@' () +Shifting token '@' () Entering state 6 -Reducing stack 0 by rule 6 (line 87): - $1 = token '@' (21.0: ) -Cleanup: popping nterm prog (1.1-19.5: ) -710. cxx-type.at:438: ok -./cxx-type.at:447: $PREPARSER ./types -p test-input +Reducing stack 0 by rule 6 (line 77): + $1 = token '@' () +Cleanup: popping nterm prog () +./c++.at:1363: $PREPARSER ./input aaaal +./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaT stderr: Starting parse Entering state 0 @@ -262340,625 +261613,1712 @@ $1 = token '@' () Cleanup: popping nterm prog () stderr: -stdout: +711. 
cxx-type.at:444: ok +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input i +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS -./cxx-type.at:452: $PREPARSER ./types test-input +======== Testing with C++ standard flags: '' +./c++.at:1363: $PREPARSER ./input aaaap +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./c++.at:1363: $PREPARSER ./input --debug aaaap +stdout: stderr: Starting parse Entering state 0 -Reducing stack 0 by rule 1 (line 64): --> $$ = nterm prog () +Stack now 0 +Reading a token +0x7fff31d07adf->Object::Object { } +0x7fff31d07ba0->Object::Object { 0x7fff31d07adf } +0x7fff31d07adf->Object::~Object { 0x7fff31d07adf, 0x7fff31d07ba0 } +Next token is token 'a' (0x7fff31d07ba0 'a') +0x7fff31d07b10->Object::Object { 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::Object { 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::~Object { 0x7fff31d07ac7, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ba0->Object::~Object { 0x7fff31d07b10, 0x7fff31d07ba0 } +Shifting token 'a' (0x7fff31d07b10 'a') +0x55dfdfadbee0->Object::Object { 0x7fff31d07b10 } +0x7fff31d07a9f->Object::Object { 0x55dfdfadbee0, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07a9f, 0x7fff31d07b10 } +0x7fff31d07b10->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07b10 } Entering state 1 +Stack now 0 1 +0x7fff31d07bc0->Object::Object { 0x55dfdfadbee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55dfdfadbee0 'a') +-> $$ = nterm item (0x7fff31d07bc0 'a') +0x55dfdfadbee0->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07bc0 } +0x55dfdfadbee0->Object::Object { 0x7fff31d07bc0 } +0x7fff31d07b78->Object::Object { 0x55dfdfadbee0, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07b78, 0x7fff31d07bc0 } +0x7fff31d07bc0->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07bc0 } +Entering state 10 +Stack now 0 10 Reading a token -Next token is token ID () -Shifting token ID () +0x7fff31d07adf->Object::Object { 0x55dfdfadbee0 } +0x7fff31d07ba0->Object::Object { 0x55dfdfadbee0, 0x7fff31d07adf } +0x7fff31d07adf->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07adf, 0x7fff31d07ba0 } +Next token is token 'a' (0x7fff31d07ba0 'a') +0x7fff31d07b10->Object::Object { 0x55dfdfadbee0, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::Object { 0x55dfdfadbee0, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07ac7, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ba0->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07b10, 0x7fff31d07ba0 } +Shifting token 'a' (0x7fff31d07b10 'a') +0x55dfdfadbf00->Object::Object { 0x55dfdfadbee0, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07a9f, 0x7fff31d07b10 } +0x7fff31d07b10->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b10 } +Entering state 1 +Stack now 0 10 1 +0x7fff31d07bc0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00 } +Reducing stack by 
rule 4 (line 142): + $1 = token 'a' (0x55dfdfadbf00 'a') +-> $$ = nterm item (0x7fff31d07bc0 'a') +0x55dfdfadbf00->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07bc0 } +0x55dfdfadbf00->Object::Object { 0x55dfdfadbee0, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b78, 0x7fff31d07bc0 } +0x7fff31d07bc0->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07bc0 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0x7fff31d07adf->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00 } +0x7fff31d07ba0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07adf } +0x7fff31d07adf->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07adf, 0x7fff31d07ba0 } +Next token is token 'a' (0x7fff31d07ba0 'a') +0x7fff31d07b10->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07ac7, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ba0->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b10, 0x7fff31d07ba0 } +Shifting token 'a' (0x7fff31d07b10 'a') +0x55dfdfadbf20->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07a9f, 0x7fff31d07b10 } +0x7fff31d07b10->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b10 } +Entering state 1 +Stack now 0 10 10 1 +0x7fff31d07bc0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55dfdfadbf20 'a') +-> $$ = nterm item (0x7fff31d07bc0 'a') +0x55dfdfadbf20->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07bc0 } +0x55dfdfadbf20->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b78, 0x7fff31d07bc0 } +0x7fff31d07bc0->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07bc0 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x7fff31d07adf->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20 } +0x7fff31d07ba0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07adf } +0x7fff31d07adf->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07adf, 0x7fff31d07ba0 } +Next token is token 'a' (0x7fff31d07ba0 'a') +0x7fff31d07b10->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07ac7, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ba0->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b10, 0x7fff31d07ba0 } +Shifting token 'a' (0x7fff31d07b10 'a') +0x55dfdfadbf40->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 
0x7fff31d07b10 } +0x7fff31d07a9f->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07a9f, 0x7fff31d07b10 } +0x7fff31d07b10->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07b10 } +Entering state 1 +Stack now 0 10 10 10 1 +0x7fff31d07bc0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55dfdfadbf40 'a') +-> $$ = nterm item (0x7fff31d07bc0 'a') +0x55dfdfadbf40->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07bc0 } +0x55dfdfadbf40->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07b78, 0x7fff31d07bc0 } +0x7fff31d07bc0->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07bc0 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x7fff31d07adf->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40 } +0x7fff31d07ba0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07adf } +0x7fff31d07adf->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07adf, 0x7fff31d07ba0 } +Next token is token 'p' (0x7fff31d07ba0 'p'Exception caught: cleaning lookahead and stack +0x55dfdfadbf40->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07ba0 } +0x55dfdfadbf20->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07ba0 } +0x55dfdfadbf00->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07ba0 } +0x55dfdfadbee0->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07ba0 } +0x7fff31d07ba0->Object::~Object { 0x7fff31d07ba0 } +exception caught: printer +end { } +./cxx-type.at:441: $PREPARSER ./types test-input +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +17.5: syntax error +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7fff31d07adf->Object::Object { } +0x7fff31d07ba0->Object::Object { 0x7fff31d07adf } +0x7fff31d07adf->Object::~Object { 0x7fff31d07adf, 0x7fff31d07ba0 } +Next token is token 'a' (0x7fff31d07ba0 'a') +0x7fff31d07b10->Object::Object { 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::Object { 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::~Object { 0x7fff31d07ac7, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ba0->Object::~Object { 0x7fff31d07b10, 0x7fff31d07ba0 } +Shifting token 'a' (0x7fff31d07b10 'a') +0x55dfdfadbee0->Object::Object { 0x7fff31d07b10 } +0x7fff31d07a9f->Object::Object { 0x55dfdfadbee0, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07a9f, 0x7fff31d07b10 } +0x7fff31d07b10->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07b10 } +Entering state 1 +Stack now 0 1 +0x7fff31d07bc0->Object::Object { 0x55dfdfadbee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55dfdfadbee0 'a') +-> $$ = nterm item (0x7fff31d07bc0 'a') +0x55dfdfadbee0->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07bc0 } +0x55dfdfadbee0->Object::Object { 0x7fff31d07bc0 } +0x7fff31d07b78->Object::Object { 0x55dfdfadbee0, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07b78, 0x7fff31d07bc0 } 
+0x7fff31d07bc0->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07bc0 } +Entering state 10 +Stack now 0 10 +Reading a token +0x7fff31d07adf->Object::Object { 0x55dfdfadbee0 } +0x7fff31d07ba0->Object::Object { 0x55dfdfadbee0, 0x7fff31d07adf } +0x7fff31d07adf->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07adf, 0x7fff31d07ba0 } +Next token is token 'a' (0x7fff31d07ba0 'a') +0x7fff31d07b10->Object::Object { 0x55dfdfadbee0, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::Object { 0x55dfdfadbee0, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07ac7, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ba0->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07b10, 0x7fff31d07ba0 } +Shifting token 'a' (0x7fff31d07b10 'a') +0x55dfdfadbf00->Object::Object { 0x55dfdfadbee0, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07a9f, 0x7fff31d07b10 } +0x7fff31d07b10->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b10 } +Entering state 1 +Stack now 0 10 1 +0x7fff31d07bc0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55dfdfadbf00 'a') +-> $$ = nterm item (0x7fff31d07bc0 'a') +0x55dfdfadbf00->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07bc0 } +0x55dfdfadbf00->Object::Object { 0x55dfdfadbee0, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b78, 0x7fff31d07bc0 } +0x7fff31d07bc0->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07bc0 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0x7fff31d07adf->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00 } +0x7fff31d07ba0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07adf } +0x7fff31d07adf->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07adf, 0x7fff31d07ba0 } +Next token is token 'a' (0x7fff31d07ba0 'a') +0x7fff31d07b10->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07ac7, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ba0->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b10, 0x7fff31d07ba0 } +Shifting token 'a' (0x7fff31d07b10 'a') +0x55dfdfadbf20->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07a9f, 0x7fff31d07b10 } +0x7fff31d07b10->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b10 } +Entering state 1 +Stack now 0 10 10 1 +0x7fff31d07bc0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55dfdfadbf20 'a') +-> $$ = nterm item (0x7fff31d07bc0 'a') +0x55dfdfadbf20->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07bc0 } +0x55dfdfadbf20->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b78, 0x7fff31d07bc0 } 
+0x7fff31d07bc0->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07bc0 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x7fff31d07adf->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20 } +0x7fff31d07ba0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07adf } +0x7fff31d07adf->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07adf, 0x7fff31d07ba0 } +Next token is token 'a' (0x7fff31d07ba0 'a') +0x7fff31d07b10->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ac7->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07ac7, 0x7fff31d07b10, 0x7fff31d07ba0 } +0x7fff31d07ba0->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b10, 0x7fff31d07ba0 } +Shifting token 'a' (0x7fff31d07b10 'a') +0x55dfdfadbf40->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07b10 } +0x7fff31d07a9f->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07a9f, 0x7fff31d07b10 } +0x7fff31d07b10->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07b10 } +Entering state 1 +Stack now 0 10 10 10 1 +0x7fff31d07bc0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55dfdfadbf40 'a') +-> $$ = nterm item (0x7fff31d07bc0 'a') +0x55dfdfadbf40->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07bc0 } +0x55dfdfadbf40->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07bc0 } +0x7fff31d07b78->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07b78, 0x7fff31d07bc0 } +0x7fff31d07bc0->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07bc0 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x7fff31d07adf->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40 } +0x7fff31d07ba0->Object::Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07adf } +0x7fff31d07adf->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07adf, 0x7fff31d07ba0 } +Next token is token 'p' (0x7fff31d07ba0 'p'Exception caught: cleaning lookahead and stack +0x55dfdfadbf40->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x55dfdfadbf40, 0x7fff31d07ba0 } +0x55dfdfadbf20->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x55dfdfadbf20, 0x7fff31d07ba0 } +0x55dfdfadbf00->Object::~Object { 0x55dfdfadbee0, 0x55dfdfadbf00, 0x7fff31d07ba0 } +0x55dfdfadbee0->Object::~Object { 0x55dfdfadbee0, 0x7fff31d07ba0 } +0x7fff31d07ba0->Object::~Object { 0x7fff31d07ba0 } +exception caught: printer +end { } +./c++.at:1363: grep '^exception caught: printer$' stderr +./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae +./cxx-type.at:441: $PREPARSER ./types -p test-input +stderr: +exception 
caught: syntax error +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reducing stack 0 by rule 1 (line 71): +-> $$ = nterm prog (1.1: ) +Entering state 1 +Reading a token +Next token is token ID (3.0: ) +Shifting token ID (3.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.0: ) +-> $$ = nterm expr (3.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (3.2: ) +Shifting token '+' (3.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (3.4: ) +Shifting token ID (3.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.4: ) +-> $$ = nterm expr (3.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (3.0: ) + $2 = token '+' (3.2: ) + $3 = nterm expr (3.4: ) +-> $$ = nterm expr (3.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (3.5: ) +Shifting token ';' (3.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (3.0-4: ) + $2 = token ';' (3.5: ) +-> $$ = nterm stmt (3.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1: ) + $2 = nterm stmt (3.0-5: ) +-> $$ = nterm prog (1.1-3.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (5.0: ) +Shifting token TYPENAME (5.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (5.2: ) +Shifting token ID (5.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (5.2: ) +-> $$ = nterm declarator (5.2: ) Entering state 13 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (5.3: ) +Shifting token ';' (5.3: ) Entering state 23 -Reducing stack 0 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 11 (line 97): + $1 = token TYPENAME (5.0: ) + $2 = nterm declarator (5.2: ) + $3 = token ';' (5.3: ) +-> $$ = nterm decl (5.0-3: ) Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (5.0-3: ) +-> $$ = nterm stmt (5.0-3: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-3.5: ) + $2 = nterm stmt (5.0-3: ) +-> $$ = nterm prog (1.1-5.3: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (7.0: ) +Shifting token TYPENAME (7.0: ) Entering state 4 Reading a token -Next token is token ID () -Shifting token ID () 
+Next token is token ID (7.2: ) +Shifting token ID (7.2: ) Entering state 11 -Reducing stack 0 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (7.2: ) +-> $$ = nterm declarator (7.2: ) Entering state 13 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (7.4: ) +Shifting token '=' (7.4: ) Entering state 22 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (7.6: ) +Shifting token ID (7.6: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (7.6: ) +-> $$ = nterm expr (7.6: ) Entering state 29 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (7.7: ) +Shifting token ';' (7.7: ) Entering state 30 -Reducing stack 0 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () +Reducing stack 0 by rule 12 (line 99): + $1 = token TYPENAME (7.0: ) + $2 = nterm declarator (7.2: ) + $3 = token '=' (7.4: ) + $4 = nterm expr (7.6: ) + $5 = token ';' (7.7: ) +-> $$ = nterm decl (7.0-7: ) +Entering state 9 +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (7.0-7: ) +-> $$ = nterm stmt (7.0-7: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-5.3: ) + $2 = nterm stmt (7.0-7: ) +-> $$ = nterm prog (1.1-7.7: ) +Entering state 1 +Reading a token +Next token is token ID (9.0: ) +Shifting token ID (9.0: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.0: ) +-> $$ = nterm expr (9.0: ) +Entering state 8 +Reading a token +Next token is token '=' (9.2: ) +Shifting token '=' (9.2: ) +Entering state 14 +Reading a token +Next token is token ID (9.4: ) +Shifting token ID (9.4: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.4: ) +-> $$ = nterm expr (9.4: ) +Entering state 24 +Reading a token +Next token is token ';' (9.5: ) +Reducing stack 0 by rule 10 (line 94): + $1 = nterm expr (9.0: ) + $2 = token '=' (9.2: ) + $3 = nterm expr (9.4: ) +-> $$ = nterm expr (9.0-4: ) +Entering state 8 +Next token is token ';' (9.5: ) +Shifting token ';' (9.5: ) +Entering state 16 +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (9.0-4: ) + $2 = token ';' (9.5: ) +-> $$ = nterm stmt (9.0-5: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-7.7: ) + $2 = nterm stmt (9.0-5: ) +-> $$ = nterm prog (1.1-9.5: ) +Entering state 1 +Reading a token +Next token is token TYPENAME (11.0: ) +Shifting token TYPENAME (11.0: ) +Entering state 4 +Reading a token +Next token is token '(' (11.2: ) +Shifting token '(' (11.2: ) +Entering state 12 +Reading a token +Next token is token ID (11.3: ) +Shifting token ID (11.3: ) +Entering state 18 +Reading a token +Next token is token ')' (11.4: ) +Stack 0 Entering state 18 +Next token is token ')' (11.4: ) +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' (11.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. 
+Stack 0 Entering state 20 +Next token is token ')' (11.4: ) +Stack 1 Entering state 21 +Next token is token ')' (11.4: ) +On stack 0, shifting token ')' (11.4: ) +Stack 0 now in state 27 +On stack 1, shifting token ')' (11.4: ) +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '+' (11.6: ) +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '+' (11.6: ) +Stack 1 dies. +Removing dead stacks. +On stack 0, shifting token '+' (11.6: ) +Stack 0 now in state 15 +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (11.3: ) +-> $$ = nterm expr (11.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (11.0: ) + $2 = token '(' (11.2: ) + $3 = nterm expr (11.3: ) + $4 = token ')' (11.4: ) +-> $$ = nterm expr (11.0-4: ) +Returning to deterministic operation. +Entering state 15 +Reading a token +Next token is token ID (11.8: ) +Shifting token ID (11.8: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (11.8: ) +-> $$ = nterm expr (11.8: ) +Entering state 25 +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (11.0-4: ) + $2 = token '+' (11.6: ) + $3 = nterm expr (11.8: ) +-> $$ = nterm expr (11.0-8: ) +Entering state 8 +Reading a token +Next token is token ';' (11.9: ) +Shifting token ';' (11.9: ) +Entering state 16 +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (11.0-8: ) + $2 = token ';' (11.9: ) +-> $$ = nterm stmt (11.0-9: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-9.5: ) + $2 = nterm stmt (11.0-9: ) +-> $$ = nterm prog (1.1-11.9: ) +Entering state 1 +Reading a token +Next token is token TYPENAME (13.0: ) +Shifting token TYPENAME (13.0: ) +Entering state 4 +Reading a token +Next token is token '(' (13.2: ) +Shifting token '(' (13.2: ) +Entering state 12 +Reading a token +Next token is token ID (13.3: ) +Shifting token ID (13.3: ) +Entering state 18 +Reading a token +Next token is token ')' (13.4: ) +Stack 0 Entering state 18 +Next token is token ')' (13.4: ) +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' (13.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' (13.4: ) +Stack 1 Entering state 21 +Next token is token ')' (13.4: ) +On stack 0, shifting token ')' (13.4: ) +Stack 0 now in state 27 +On stack 1, shifting token ')' (13.4: ) +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token ';' (13.5: ) +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token ';' (13.5: ) +On stack 0, shifting token ';' (13.5: ) +Stack 0 now in state 16 +On stack 1, shifting token ';' (13.5: ) +Stack 1 now in state 23 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME (15.0: ) +Stack 1 Entering state 23 +Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. 
+Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. +On stack 0, shifting token TYPENAME (15.0: ) +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (13.3: ) +-> $$ = nterm declarator (13.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (13.2: ) + $2 = nterm declarator (13.3: ) + $3 = token ')' (13.4: ) +-> $$ = nterm declarator (13.2-4: ) +Reducing stack -1 by rule 11 (line 97): + $1 = token TYPENAME (13.0: ) + $2 = nterm declarator (13.2-4: ) + $3 = token ';' (13.5: ) +-> $$ = nterm decl (13.0-5: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (13.0-5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (13.3: ) +-> $$ = nterm expr (13.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (13.0: ) + $2 = token '(' (13.2: ) + $3 = nterm expr (13.3: ) + $4 = token ')' (13.4: ) +-> $$ = nterm expr (13.0-4: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (13.0-4: ) + $2 = token ';' (13.5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-11.9: ) + $2 = nterm stmt (13.0-5: ) +-> $$ = nterm prog (1.1-13.5: ) +Returning to deterministic operation. +Entering state 4 +Reading a token +Next token is token '(' (15.2: ) +Shifting token '(' (15.2: ) +Entering state 12 +Reading a token +Next token is token ID (15.3: ) +Shifting token ID (15.3: ) +Entering state 18 +Reading a token +Next token is token ')' (15.4: ) +Stack 0 Entering state 18 +Next token is token ')' (15.4: ) +Splitting off stack 1 from 0. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. +Stack 1 Entering state 21 +Next token is token ')' (15.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. +Stack 0 Entering state 20 +Next token is token ')' (15.4: ) +Stack 1 Entering state 21 +Next token is token ')' (15.4: ) +On stack 0, shifting token ')' (15.4: ) +Stack 0 now in state 27 +On stack 1, shifting token ')' (15.4: ) +Stack 1 now in state 28 +Stack 0 Entering state 27 +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. +Stack 0 Entering state 8 +Reading a token +Next token is token '=' (15.6: ) +Stack 1 Entering state 28 +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. +Stack 1 Entering state 13 +Next token is token '=' (15.6: ) +On stack 0, shifting token '=' (15.6: ) +Stack 0 now in state 14 +On stack 1, shifting token '=' (15.6: ) +Stack 1 now in state 22 +Stack 0 Entering state 14 +Reading a token +Next token is token ID (15.8: ) +Stack 1 Entering state 22 +Next token is token ID (15.8: ) +On stack 0, shifting token ID (15.8: ) +Stack 0 now in state 5 +On stack 1, shifting token ID (15.8: ) +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token '+' (15.10: ) +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. 
+Stack 1 Entering state 29 +Next token is token '+' (15.10: ) +On stack 0, shifting token '+' (15.10: ) +Stack 0 now in state 15 +On stack 1, shifting token '+' (15.10: ) +Stack 1 now in state 15 +Stack 0 Entering state 15 +Reading a token +Next token is token ID (15.12: ) +Stack 1 Entering state 15 +Next token is token ID (15.12: ) +On stack 0, shifting token ID (15.12: ) +Stack 0 now in state 5 +On stack 1, shifting token ID (15.12: ) +Stack 1 now in state 5 +Stack 0 Entering state 5 +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. +Stack 0 Entering state 25 +Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. +Stack 0 Entering state 24 +Reading a token +Next token is token ';' (15.13: ) +Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. +Stack 0 Entering state 8 +Next token is token ';' (15.13: ) +Stack 1 Entering state 5 +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. +Stack 1 Entering state 25 +Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. +Stack 1 Entering state 29 +Next token is token ';' (15.13: ) +On stack 0, shifting token ';' (15.13: ) +Stack 0 now in state 16 +On stack 1, shifting token ';' (15.13: ) +Stack 1 now in state 30 +Stack 0 Entering state 16 +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. +Stack 0 Entering state 7 +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. +Stack 0 Entering state 1 +Reading a token +Next token is token TYPENAME (17.0: ) +Stack 1 Entering state 30 +Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. +Stack 1 Entering state 9 +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. +Stack 1 Entering state 7 +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. +Merging stack 1 into stack 0. +Removing dead stacks. 
+On stack 0, shifting token TYPENAME (17.0: ) +Stack 0 now in state 4 +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (15.3: ) +-> $$ = nterm declarator (15.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (15.2: ) + $2 = nterm declarator (15.3: ) + $3 = token ')' (15.4: ) +-> $$ = nterm declarator (15.2-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 12 (line 99): + $1 = token TYPENAME (15.0: ) + $2 = nterm declarator (15.2-4: ) + $3 = token '=' (15.6: ) + $4 = nterm expr (15.8-12: ) + $5 = token ';' (15.13: ) +-> $$ = nterm decl (15.0-13: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (15.0-13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.3: ) +-> $$ = nterm expr (15.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (15.0: ) + $2 = token '(' (15.2: ) + $3 = nterm expr (15.3: ) + $4 = token ')' (15.4: ) +-> $$ = nterm expr (15.0-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 10 (line 94): + $1 = nterm expr (15.0-4: ) + $2 = token '=' (15.6: ) + $3 = nterm expr (15.8-12: ) +-> $$ = nterm expr (15.0-12: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (15.0-12: ) + $2 = token ';' (15.13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-13.5: ) + $2 = nterm stmt (15.0-13: ) +-> $$ = nterm prog (1.1-15.13: ) +Returning to deterministic operation. 
+Entering state 4 +Reading a token +Next token is token '(' (17.2: ) +Shifting token '(' (17.2: ) +Entering state 12 +Reading a token +Next token is token ID (17.3: ) +Shifting token ID (17.3: ) +Entering state 18 +Reading a token +Next token is token ID (17.5: ) +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (17.3: ) +-> $$ = nterm expr (17.3: ) +Entering state 20 +Next token is token ID (17.5: ) +17.5: syntax error +Error: popping nterm expr (17.3: ) +Error: popping token '(' (17.2: ) +Error: popping token TYPENAME (17.0: ) +Shifting token error (17.0-5: ) +Entering state 3 +Next token is token ID (17.5: ) +Error: discarding token ID (17.5: ) +Reading a token +Next token is token ')' (17.6: ) +Error: discarding token ')' (17.6: ) +Reading a token +Next token is token '=' (17.8: ) +Error: discarding token '=' (17.8: ) +Reading a token +Next token is token ID (17.10: ) +Error: discarding token ID (17.10: ) +Reading a token +Next token is token '+' (17.12: ) +Error: discarding token '+' (17.12: ) +Reading a token +Next token is token ID (17.14: ) +Error: discarding token ID (17.14: ) +Reading a token +Next token is token ';' (17.15: ) +Entering state 3 +Next token is token ';' (17.15: ) +Shifting token ';' (17.15: ) +Entering state 10 +Reducing stack 0 by rule 5 (line 86): + $1 = token error (17.0-14: ) + $2 = token ';' (17.15: ) +-> $$ = nterm stmt (17.0-15: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-15.13: ) + $2 = nterm stmt (17.0-15: ) +-> $$ = nterm prog (1.1-17.15: ) +Entering state 1 +Reading a token +Next token is token ID (19.0: ) +Shifting token ID (19.0: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.0: ) +-> $$ = nterm expr (19.0: ) +Entering state 8 +Reading a token +Next token is token '+' (19.2: ) +Shifting token '+' (19.2: ) +Entering state 15 +Reading a token +Next token is token ID (19.4: ) +Shifting token ID (19.4: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.4: ) +-> $$ = nterm expr (19.4: ) +Entering state 25 +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (19.0: ) + $2 = token '+' (19.2: ) + $3 = nterm expr (19.4: ) +-> $$ = nterm expr (19.0-4: ) +Entering state 8 +Reading a token +Next token is token ';' (19.5: ) +Shifting token ';' (19.5: ) +Entering state 16 +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (19.0-4: ) + $2 = token ';' (19.5: ) +-> $$ = nterm stmt (19.0-5: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-17.15: ) + $2 = nterm stmt (19.0-5: ) +-> $$ = nterm prog (1.1-19.5: ) +Entering state 1 +Reading a token +Next token is token '@' (21.0: ) +Shifting token '@' (21.0: ) +Entering state 6 +Reducing stack 0 by rule 6 (line 87): + $1 = token '@' (21.0: ) +Cleanup: popping nterm prog (1.1-19.5: ) +./cxx-type.at:441: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaE +714. glr-regression.at:205: testing Badly Collapsed GLR States: glr.c ... 
+./glr-regression.at:205: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.c glr-regr1.y +stderr: +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Reducing stack 0 by rule 1 (line 71): +-> $$ = nterm prog (1.1: ) +Entering state 1 +Reading a token +Next token is token ID (3.0: ) +Shifting token ID (3.0: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.0: ) +-> $$ = nterm expr (3.0: ) +Entering state 8 +Reading a token +Next token is token '+' (3.2: ) +Shifting token '+' (3.2: ) +Entering state 15 +Reading a token +Next token is token ID (3.4: ) +Shifting token ID (3.4: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (3.4: ) +-> $$ = nterm expr (3.4: ) +Entering state 25 +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (3.0: ) + $2 = token '+' (3.2: ) + $3 = nterm expr (3.4: ) +-> $$ = nterm expr (3.0-4: ) +Entering state 8 +Reading a token +Next token is token ';' (3.5: ) +Shifting token ';' (3.5: ) +Entering state 16 +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (3.0-4: ) + $2 = token ';' (3.5: ) +-> $$ = nterm stmt (3.0-5: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1: ) + $2 = nterm stmt (3.0-5: ) +-> $$ = nterm prog (1.1-3.5: ) +Entering state 1 +Reading a token +Next token is token TYPENAME (5.0: ) +Shifting token TYPENAME (5.0: ) +Entering state 4 +Reading a token +Next token is token ID (5.2: ) +Shifting token ID (5.2: ) +Entering state 11 +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (5.2: ) +-> $$ = nterm declarator (5.2: ) +Entering state 13 +Reading a token +Next token is token ';' (5.3: ) +Shifting token ';' (5.3: ) +Entering state 23 +Reducing stack 0 by rule 11 (line 97): + $1 = token TYPENAME (5.0: ) + $2 = nterm declarator (5.2: ) + $3 = token ';' (5.3: ) +-> $$ = nterm decl (5.0-3: ) +Entering state 9 +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (5.0-3: ) +-> $$ = nterm stmt (5.0-3: ) +Entering state 7 +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-3.5: ) + $2 = nterm stmt (5.0-3: ) +-> $$ = nterm prog (1.1-5.3: ) +Entering state 1 +Reading a token +Next token is token TYPENAME (7.0: ) +Shifting token TYPENAME (7.0: ) +Entering state 4 +Reading a token +Next token is token ID (7.2: ) +Shifting token ID (7.2: ) +Entering state 11 +Reducing stack 0 by rule 13 (line 104): + $1 = token ID (7.2: ) +-> $$ = nterm declarator (7.2: ) +Entering state 13 +Reading a token +Next token is token '=' (7.4: ) +Shifting token '=' (7.4: ) +Entering state 22 +Reading a token +Next token is token ID (7.6: ) +Shifting token ID (7.6: ) +Entering state 5 +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (7.6: ) +-> $$ = nterm expr (7.6: ) +Entering state 29 +Reading a token +Next token is token ';' (7.7: ) +Shifting token ';' (7.7: ) +Entering state 30 +Reducing stack 0 by rule 12 (line 99): + $1 = token TYPENAME (7.0: ) + $2 = nterm declarator (7.2: ) + $3 = token '=' (7.4: ) + $4 = nterm expr (7.6: ) + $5 = token ';' (7.7: ) +-> $$ = nterm decl (7.0-7: ) Entering state 9 -Reducing stack 0 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () +Reducing stack 0 by rule 4 (line 85): + $1 = nterm decl (7.0-7: ) +-> $$ = nterm stmt (7.0-7: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 
= nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-5.3: ) + $2 = nterm stmt (7.0-7: ) +-> $$ = nterm prog (1.1-7.7: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.0: ) +Shifting token ID (9.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.0: ) +-> $$ = nterm expr (9.0: ) Entering state 8 Reading a token -Next token is token '=' () -Shifting token '=' () +Next token is token '=' (9.2: ) +Shifting token '=' (9.2: ) Entering state 14 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (9.4: ) +Shifting token ID (9.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (9.4: ) +-> $$ = nterm expr (9.4: ) Entering state 24 Reading a token -Next token is token ';' () -Reducing stack 0 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () +Next token is token ';' (9.5: ) +Reducing stack 0 by rule 10 (line 94): + $1 = nterm expr (9.0: ) + $2 = token '=' (9.2: ) + $3 = nterm expr (9.4: ) +-> $$ = nterm expr (9.0-4: ) Entering state 8 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (9.5: ) +Shifting token ';' (9.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (9.0-4: ) + $2 = token ';' (9.5: ) +-> $$ = nterm stmt (9.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-7.7: ) + $2 = nterm stmt (9.0-5: ) +-> $$ = nterm prog (1.1-9.5: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (11.0: ) +Shifting token TYPENAME (11.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (11.2: ) +Shifting token '(' (11.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.3: ) +Shifting token ID (11.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (11.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (11.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (11.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (11.4: ) +On stack 0, shifting token ')' (11.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (11.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. 
Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '+' () +Next token is token '+' (11.6: ) Stack 1 dies. Removing dead stacks. -On stack 0, shifting token '+' () +On stack 0, shifting token '+' (11.6: ) Stack 0 now in state 15 -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (11.3: ) +-> $$ = nterm expr (11.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (11.0: ) + $2 = token '(' (11.2: ) + $3 = nterm expr (11.3: ) + $4 = token ')' (11.4: ) +-> $$ = nterm expr (11.0-4: ) Returning to deterministic operation. Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (11.8: ) +Shifting token ID (11.8: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (11.8: ) +-> $$ = nterm expr (11.8: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (11.0-4: ) + $2 = token '+' (11.6: ) + $3 = nterm expr (11.8: ) +-> $$ = nterm expr (11.0-8: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (11.9: ) +Shifting token ';' (11.9: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (11.0-8: ) + $2 = token ';' (11.9: ) +-> $$ = nterm stmt (11.0-9: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-9.5: ) + $2 = nterm stmt (11.0-9: ) +-> $$ = nterm prog (1.1-11.9: ) Entering state 1 Reading a token -Next token is token TYPENAME () -Shifting token TYPENAME () +Next token is token TYPENAME (13.0: ) +Shifting token TYPENAME (13.0: ) Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (13.2: ) +Shifting token '(' (13.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (13.3: ) +Shifting token ID (13.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (13.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (13.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. 
Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (13.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (13.4: ) +On stack 0, shifting token ')' (13.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (13.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. Stack 0 Entering state 8 Reading a token -Next token is token ';' () +Next token is token ';' (13.5: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (13.5: ) +On stack 0, shifting token ';' (13.5: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (13.5: ) Stack 1 now in state 23 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (15.0: ) Stack 1 Entering state 23 -Reduced stack 1 by rule 11 (line 87); action deferred. Now in state 9. +Reduced stack 1 by rule 11 (line 97); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. 
-On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (15.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 11 (line 87): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (13.3: ) +-> $$ = nterm declarator (13.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (13.2: ) + $2 = nterm declarator (13.3: ) + $3 = token ')' (13.4: ) +-> $$ = nterm declarator (13.2-4: ) +Reducing stack -1 by rule 11 (line 97): + $1 = token TYPENAME (13.0: ) + $2 = nterm declarator (13.2-4: ) + $3 = token ';' (13.5: ) +-> $$ = nterm decl (13.0-5: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (13.0-5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (13.3: ) +-> $$ = nterm expr (13.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (13.0: ) + $2 = token '(' (13.2: ) + $3 = nterm expr (13.3: ) + $4 = token ')' (13.4: ) +-> $$ = nterm expr (13.0-4: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (13.0-4: ) + $2 = token ';' (13.5: ) +-> $$ = nterm stmt (13.0-5: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-11.9: ) + $2 = nterm stmt (13.0-5: ) +-> $$ = nterm prog (1.1-13.5: ) Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (15.2: ) +Shifting token '(' (15.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (15.3: ) +Shifting token ID (15.3: ) Entering state 18 Reading a token -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 0 Entering state 18 -Next token is token ')' () +Next token is token ')' (15.4: ) Splitting off stack 1 from 0. -Reduced stack 1 by rule 13 (line 94); action deferred. Now in state 21. +Reduced stack 1 by rule 13 (line 104); action deferred. Now in state 21. Stack 1 Entering state 21 -Next token is token ')' () -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 20. +Next token is token ')' (15.4: ) +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 20. Stack 0 Entering state 20 -Next token is token ')' () +Next token is token ')' (15.4: ) Stack 1 Entering state 21 -Next token is token ')' () -On stack 0, shifting token ')' () +Next token is token ')' (15.4: ) +On stack 0, shifting token ')' (15.4: ) Stack 0 now in state 27 -On stack 1, shifting token ')' () +On stack 1, shifting token ')' (15.4: ) Stack 1 now in state 28 Stack 0 Entering state 27 -Reduced stack 0 by rule 8 (line 81); action deferred. Now in state 8. +Reduced stack 0 by rule 8 (line 91); action deferred. Now in state 8. 
Stack 0 Entering state 8 Reading a token -Next token is token '=' () +Next token is token '=' (15.6: ) Stack 1 Entering state 28 -Reduced stack 1 by rule 14 (line 95); action deferred. Now in state 13. +Reduced stack 1 by rule 14 (line 105); action deferred. Now in state 13. Stack 1 Entering state 13 -Next token is token '=' () -On stack 0, shifting token '=' () +Next token is token '=' (15.6: ) +On stack 0, shifting token '=' (15.6: ) Stack 0 now in state 14 -On stack 1, shifting token '=' () +On stack 1, shifting token '=' (15.6: ) Stack 1 now in state 22 Stack 0 Entering state 14 Reading a token -Next token is token ID () +Next token is token ID (15.8: ) Stack 1 Entering state 22 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.8: ) +On stack 0, shifting token ID (15.8: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.8: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 24. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token '+' () +Next token is token '+' (15.10: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 29. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token '+' () -On stack 0, shifting token '+' () +Next token is token '+' (15.10: ) +On stack 0, shifting token '+' (15.10: ) Stack 0 now in state 15 -On stack 1, shifting token '+' () +On stack 1, shifting token '+' (15.10: ) Stack 1 now in state 15 Stack 0 Entering state 15 Reading a token -Next token is token ID () +Next token is token ID (15.12: ) Stack 1 Entering state 15 -Next token is token ID () -On stack 0, shifting token ID () +Next token is token ID (15.12: ) +On stack 0, shifting token ID (15.12: ) Stack 0 now in state 5 -On stack 1, shifting token ID () +On stack 1, shifting token ID (15.12: ) Stack 1 now in state 5 Stack 0 Entering state 5 -Reduced stack 0 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 0 by rule 7 (line 90); action deferred. Now in state 25. Stack 0 Entering state 25 -Reduced stack 0 by rule 9 (line 83); action deferred. Now in state 24. +Reduced stack 0 by rule 9 (line 93); action deferred. Now in state 24. Stack 0 Entering state 24 Reading a token -Next token is token ';' () -Reduced stack 0 by rule 10 (line 84); action deferred. Now in state 8. +Next token is token ';' (15.13: ) +Reduced stack 0 by rule 10 (line 94); action deferred. Now in state 8. Stack 0 Entering state 8 -Next token is token ';' () +Next token is token ';' (15.13: ) Stack 1 Entering state 5 -Reduced stack 1 by rule 7 (line 80); action deferred. Now in state 25. +Reduced stack 1 by rule 7 (line 90); action deferred. Now in state 25. Stack 1 Entering state 25 -Reduced stack 1 by rule 9 (line 83); action deferred. Now in state 29. +Reduced stack 1 by rule 9 (line 93); action deferred. Now in state 29. Stack 1 Entering state 29 -Next token is token ';' () -On stack 0, shifting token ';' () +Next token is token ';' (15.13: ) +On stack 0, shifting token ';' (15.13: ) Stack 0 now in state 16 -On stack 1, shifting token ';' () +On stack 1, shifting token ';' (15.13: ) Stack 1 now in state 30 Stack 0 Entering state 16 -Reduced stack 0 by rule 3 (line 74); action deferred. Now in state 7. +Reduced stack 0 by rule 3 (line 84); action deferred. Now in state 7. 
Stack 0 Entering state 7 -Reduced stack 0 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 0 by rule 2 (line 72); action deferred. Now in state 1. Stack 0 Entering state 1 Reading a token -Next token is token TYPENAME () +Next token is token TYPENAME (17.0: ) Stack 1 Entering state 30 -Reduced stack 1 by rule 12 (line 89); action deferred. Now in state 9. +Reduced stack 1 by rule 12 (line 99); action deferred. Now in state 9. Stack 1 Entering state 9 -Reduced stack 1 by rule 4 (line 75); action deferred. Now in state 7. +Reduced stack 1 by rule 4 (line 85); action deferred. Now in state 7. Stack 1 Entering state 7 -Reduced stack 1 by rule 2 (line 65); action deferred. Now in state 1. +Reduced stack 1 by rule 2 (line 72); action deferred. Now in state 1. Merging stack 1 into stack 0. Removing dead stacks. -On stack 0, shifting token TYPENAME () +On stack 0, shifting token TYPENAME (17.0: ) Stack 0 now in state 4 -Reducing stack -1 by rule 13 (line 94): - $1 = token ID () --> $$ = nterm declarator () -Reducing stack -1 by rule 14 (line 95): - $1 = token '(' () - $2 = nterm declarator () - $3 = token ')' () --> $$ = nterm declarator () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 12 (line 89): - $1 = token TYPENAME () - $2 = nterm declarator () - $3 = token '=' () - $4 = nterm expr () - $5 = token ';' () --> $$ = nterm decl () -Reducing stack -1 by rule 4 (line 75): - $1 = nterm decl () --> $$ = nterm stmt () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 8 (line 81): - $1 = token TYPENAME () - $2 = token '(' () - $3 = nterm expr () - $4 = token ')' () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () -Reducing stack -1 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 10 (line 84): - $1 = nterm expr () - $2 = token '=' () - $3 = nterm expr () --> $$ = nterm expr () -Reducing stack -1 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () -Reducing stack -1 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack -1 by rule 13 (line 104): + $1 = token ID (15.3: ) +-> $$ = nterm declarator (15.3: ) +Reducing stack -1 by rule 14 (line 105): + $1 = token '(' (15.2: ) + $2 = nterm declarator (15.3: ) + $3 = token ')' (15.4: ) +-> $$ = nterm declarator (15.2-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 12 (line 99): + $1 = token TYPENAME (15.0: ) + $2 = nterm declarator (15.2-4: ) + $3 = token '=' (15.6: ) + $4 = nterm expr (15.8-12: ) + $5 = token ';' (15.13: ) +-> $$ = nterm decl (15.0-13: ) +Reducing stack -1 by rule 4 (line 85): + $1 = nterm decl (15.0-13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 7 (line 90): + $1 
= token ID (15.3: ) +-> $$ = nterm expr (15.3: ) +Reducing stack -1 by rule 8 (line 91): + $1 = token TYPENAME (15.0: ) + $2 = token '(' (15.2: ) + $3 = nterm expr (15.3: ) + $4 = token ')' (15.4: ) +-> $$ = nterm expr (15.0-4: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.8: ) +-> $$ = nterm expr (15.8: ) +Reducing stack -1 by rule 7 (line 90): + $1 = token ID (15.12: ) +-> $$ = nterm expr (15.12: ) +Reducing stack -1 by rule 9 (line 93): + $1 = nterm expr (15.8: ) + $2 = token '+' (15.10: ) + $3 = nterm expr (15.12: ) +-> $$ = nterm expr (15.8-12: ) +Reducing stack -1 by rule 10 (line 94): + $1 = nterm expr (15.0-4: ) + $2 = token '=' (15.6: ) + $3 = nterm expr (15.8-12: ) +-> $$ = nterm expr (15.0-12: ) +Reducing stack -1 by rule 3 (line 84): + $1 = nterm expr (15.0-12: ) + $2 = token ';' (15.13: ) +-> $$ = nterm stmt (15.0-13: ) +Reducing stack -1 by rule 2 (line 72): + $1 = nterm prog (1.1-13.5: ) + $2 = nterm stmt (15.0-13: ) +-> $$ = nterm prog (1.1-15.13: ) Returning to deterministic operation. Entering state 4 Reading a token -Next token is token '(' () -Shifting token '(' () +Next token is token '(' (17.2: ) +Shifting token '(' (17.2: ) Entering state 12 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (17.3: ) +Shifting token ID (17.3: ) Entering state 18 Reading a token -Next token is token ID () -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Next token is token ID (17.5: ) +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (17.3: ) +-> $$ = nterm expr (17.3: ) Entering state 20 -Next token is token ID () -syntax error -Error: popping nterm expr () -Error: popping token '(' () -Error: popping token TYPENAME () -Shifting token error () +Next token is token ID (17.5: ) +17.5: syntax error +Error: popping nterm expr (17.3: ) +Error: popping token '(' (17.2: ) +Error: popping token TYPENAME (17.0: ) +Shifting token error (17.0-5: ) Entering state 3 -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.5: ) +Error: discarding token ID (17.5: ) Reading a token -Next token is token ')' () -Error: discarding token ')' () +Next token is token ')' (17.6: ) +Error: discarding token ')' (17.6: ) Reading a token -Next token is token '=' () -Error: discarding token '=' () +Next token is token '=' (17.8: ) +Error: discarding token '=' (17.8: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.10: ) +Error: discarding token ID (17.10: ) Reading a token -Next token is token '+' () -Error: discarding token '+' () +Next token is token '+' (17.12: ) +Error: discarding token '+' (17.12: ) Reading a token -Next token is token ID () -Error: discarding token ID () +Next token is token ID (17.14: ) +Error: discarding token ID (17.14: ) Reading a token -Next token is token ';' () +Next token is token ';' (17.15: ) Entering state 3 -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (17.15: ) +Shifting token ';' (17.15: ) Entering state 10 -Reducing stack 0 by rule 5 (line 76): - $1 = token error () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 5 (line 86): + $1 = token error (17.0-14: ) + $2 = token ';' (17.15: ) +-> $$ = nterm stmt (17.0-15: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-15.13: ) + $2 = nterm stmt (17.0-15: ) +-> $$ = nterm prog 
(1.1-17.15: ) Entering state 1 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.0: ) +Shifting token ID (19.0: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.0: ) +-> $$ = nterm expr (19.0: ) Entering state 8 Reading a token -Next token is token '+' () -Shifting token '+' () +Next token is token '+' (19.2: ) +Shifting token '+' (19.2: ) Entering state 15 Reading a token -Next token is token ID () -Shifting token ID () +Next token is token ID (19.4: ) +Shifting token ID (19.4: ) Entering state 5 -Reducing stack 0 by rule 7 (line 80): - $1 = token ID () --> $$ = nterm expr () +Reducing stack 0 by rule 7 (line 90): + $1 = token ID (19.4: ) +-> $$ = nterm expr (19.4: ) Entering state 25 -Reducing stack 0 by rule 9 (line 83): - $1 = nterm expr () - $2 = token '+' () - $3 = nterm expr () --> $$ = nterm expr () +Reducing stack 0 by rule 9 (line 93): + $1 = nterm expr (19.0: ) + $2 = token '+' (19.2: ) + $3 = nterm expr (19.4: ) +-> $$ = nterm expr (19.0-4: ) Entering state 8 Reading a token -Next token is token ';' () -Shifting token ';' () +Next token is token ';' (19.5: ) +Shifting token ';' (19.5: ) Entering state 16 -Reducing stack 0 by rule 3 (line 74): - $1 = nterm expr () - $2 = token ';' () --> $$ = nterm stmt () +Reducing stack 0 by rule 3 (line 84): + $1 = nterm expr (19.0-4: ) + $2 = token ';' (19.5: ) +-> $$ = nterm stmt (19.0-5: ) Entering state 7 -Reducing stack 0 by rule 2 (line 65): - $1 = nterm prog () - $2 = nterm stmt () --> $$ = nterm prog () +Reducing stack 0 by rule 2 (line 72): + $1 = nterm prog (1.1-17.15: ) + $2 = nterm stmt (19.0-5: ) +-> $$ = nterm prog (1.1-19.5: ) Entering state 1 Reading a token -Next token is token '@' () -Shifting token '@' () +Next token is token '@' (21.0: ) +Shifting token '@' (21.0: ) Entering state 6 -Reducing stack 0 by rule 6 (line 77): - $1 = token '@' () -Cleanup: popping nterm prog () +Reducing stack 0 by rule 6 (line 87): + $1 = token '@' (21.0: ) +Cleanup: popping nterm prog (1.1-19.5: ) +710. cxx-type.at:438: ok +./c++.at:1363: $PREPARSER ./input aaaaT +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./c++.at:1363: $PREPARSER ./input aaaaR +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:205: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr1 glr-regr1.c $LIBS +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +715. glr-regression.at:206: testing Badly Collapsed GLR States: glr.cc ... 
+./glr-regression.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y +stderr: +stdout: +======== Testing with C++ standard flags: '' +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:206: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS +stderr: +stdout: +./c++.at:1361: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1555: $PREPARSER ./test +stderr: +./c++.at:1361: $PREPARSER ./input aaaal +stderr: +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: yylex +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1361: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaap +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input --debug aaaap +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x55ba9bec9b40->Object::Object { } +Next token is token 'a' (0x55ba9bec9b40 'a') +Shifting token 'a' (0x55ba9bec9b40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55ba9bec9b40 'a') +-> $$ = nterm item (0x55ba9bec9b40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x55ba9bec9b90->Object::Object { 0x55ba9bec9b40 } +Next token is token 'a' (0x55ba9bec9b90 'a') +Shifting token 'a' (0x55ba9bec9b90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55ba9bec9b90 'a') +-> $$ = nterm item (0x55ba9bec9b90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x55ba9bec9be0->Object::Object { 0x55ba9bec9b40, 0x55ba9bec9b90 } +Next token is token 'a' (0x55ba9bec9be0 'a') +Shifting token 'a' (0x55ba9bec9be0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55ba9bec9be0 'a') +-> $$ = nterm item (0x55ba9bec9be0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x55ba9bec9c30->Object::Object { 0x55ba9bec9b40, 0x55ba9bec9b90, 0x55ba9bec9be0 } +Next token is token 'a' (0x55ba9bec9c30 'a') +Shifting token 'a' (0x55ba9bec9c30 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55ba9bec9c30 'a') +-> $$ = nterm item (0x55ba9bec9c30 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x55ba9bec9c80->Object::Object { 0x55ba9bec9b40, 0x55ba9bec9b90, 0x55ba9bec9be0, 0x55ba9bec9c30 } +Next token is token 'p' (0x55ba9bec9c80 'p'Exception caught: cleaning lookahead and stack +0x55ba9bec9c80->Object::~Object { 0x55ba9bec9b40, 0x55ba9bec9b90, 0x55ba9bec9be0, 0x55ba9bec9c30, 0x55ba9bec9c80 } +0x55ba9bec9c30->Object::~Object { 0x55ba9bec9b40, 0x55ba9bec9b90, 0x55ba9bec9be0, 0x55ba9bec9c30 } +0x55ba9bec9be0->Object::~Object { 0x55ba9bec9b40, 0x55ba9bec9b90, 0x55ba9bec9be0 } +0x55ba9bec9b90->Object::~Object { 0x55ba9bec9b40, 0x55ba9bec9b90 } +0x55ba9bec9b40->Object::~Object { 0x55ba9bec9b40 } +exception caught: printer +end { } +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for 
summaries/d' stderr +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x55ba9bec9b40->Object::Object { } +Next token is token 'a' (0x55ba9bec9b40 'a') +Shifting token 'a' (0x55ba9bec9b40 'a') +Entering state 1 +Stack now 0 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55ba9bec9b40 'a') +-> $$ = nterm item (0x55ba9bec9b40 'a') +Entering state 10 +Stack now 0 10 +Reading a token +0x55ba9bec9b90->Object::Object { 0x55ba9bec9b40 } +Next token is token 'a' (0x55ba9bec9b90 'a') +Shifting token 'a' (0x55ba9bec9b90 'a') +Entering state 1 +Stack now 0 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55ba9bec9b90 'a') +-> $$ = nterm item (0x55ba9bec9b90 'a') +Entering state 10 +Stack now 0 10 10 +Reading a token +0x55ba9bec9be0->Object::Object { 0x55ba9bec9b40, 0x55ba9bec9b90 } +Next token is token 'a' (0x55ba9bec9be0 'a') +Shifting token 'a' (0x55ba9bec9be0 'a') +Entering state 1 +Stack now 0 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55ba9bec9be0 'a') +-> $$ = nterm item (0x55ba9bec9be0 'a') +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x55ba9bec9c30->Object::Object { 0x55ba9bec9b40, 0x55ba9bec9b90, 0x55ba9bec9be0 } +Next token is token 'a' (0x55ba9bec9c30 'a') +Shifting token 'a' (0x55ba9bec9c30 'a') +Entering state 1 +Stack now 0 10 10 10 1 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55ba9bec9c30 'a') +-> $$ = nterm item (0x55ba9bec9c30 'a') +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x55ba9bec9c80->Object::Object { 0x55ba9bec9b40, 0x55ba9bec9b90, 0x55ba9bec9be0, 0x55ba9bec9c30 } +Next token is token 'p' (0x55ba9bec9c80 'p'Exception caught: cleaning lookahead and stack +0x55ba9bec9c80->Object::~Object { 0x55ba9bec9b40, 0x55ba9bec9b90, 0x55ba9bec9be0, 0x55ba9bec9c30, 0x55ba9bec9c80 } +0x55ba9bec9c30->Object::~Object { 0x55ba9bec9b40, 0x55ba9bec9b90, 0x55ba9bec9be0, 0x55ba9bec9c30 } +0x55ba9bec9be0->Object::~Object { 0x55ba9bec9b40, 0x55ba9bec9b90, 0x55ba9bec9be0 } +0x55ba9bec9b90->Object::~Object { 0x55ba9bec9b40, 0x55ba9bec9b90 } +0x55ba9bec9b40->Object::~Object { 0x55ba9bec9b40 } +exception caught: printer +end { } +./c++.at:1361: grep '^exception caught: printer$' stderr +stderr: +stdout: +exception caught: printer +stdout: +./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:1555: ./check +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +stderr: +exception caught: syntax error +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaT +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stderr: +stdout: +stdout: +./c++.at:851: $PREPARSER ./input +./cxx-type.at:452: $PREPARSER ./types test-input +stderr: +stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr 17.5: syntax error -711. 
cxx-type.at:444: ok +======== Testing with C++ standard flags: '' ./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: ./cxx-type.at:452: $PREPARSER ./types -p test-input +./cxx-type.at:458: $PREPARSER ./types test-input +stderr: stderr: Starting parse Entering state 0 @@ -263568,8 +263928,9 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) - +syntax error, unexpected ID, expecting '=' or '+' or ')' ./cxx-type.at:452: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 @@ -264179,221 +264540,8 @@ Reducing stack 0 by rule 6 (line 87): $1 = token '@' (21.0: ) Cleanup: popping nterm prog (1.1-19.5: ) -715. glr-regression.at:206: testing Badly Collapsed GLR States: glr.cc ... -./glr-regression.at:206: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y -712. cxx-type.at:449: ok - -716. glr-regression.at:207: testing Badly Collapsed GLR States: glr2.cc ... -./glr-regression.at:207: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y -./glr-regression.at:206: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS -717. glr-regression.at:354: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.c ... -./glr-regression.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.c glr-regr2a.y -./glr-regression.at:207: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS -stderr: -stdout: -./c++.at:1360: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.c $LIBS -./c++.at:1360: $PREPARSER ./input i -stderr: -stderr: -exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:857: $PREPARSER ./input -stderr: -./c++.at:1360: $PREPARSER ./input aaaap -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1360: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x5617bb585b40->Object::Object { } -Next token is token 'a' (0x5617bb585b40 'a') -Shifting token 'a' (0x5617bb585b40 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5617bb585b40 'a') --> $$ = nterm item (0x5617bb585b40 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x5617bb585b90->Object::Object { 0x5617bb585b40 } -Next token is token 'a' (0x5617bb585b90 'a') -Shifting token 'a' (0x5617bb585b90 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - 
$1 = token 'a' (0x5617bb585b90 'a') --> $$ = nterm item (0x5617bb585b90 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x5617bb585be0->Object::Object { 0x5617bb585b40, 0x5617bb585b90 } -Next token is token 'a' (0x5617bb585be0 'a') -Shifting token 'a' (0x5617bb585be0 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5617bb585be0 'a') --> $$ = nterm item (0x5617bb585be0 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x5617bb585c30->Object::Object { 0x5617bb585b40, 0x5617bb585b90, 0x5617bb585be0 } -Next token is token 'a' (0x5617bb585c30 'a') -Shifting token 'a' (0x5617bb585c30 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5617bb585c30 'a') --> $$ = nterm item (0x5617bb585c30 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x5617bb585c80->Object::Object { 0x5617bb585b40, 0x5617bb585b90, 0x5617bb585be0, 0x5617bb585c30 } -Next token is token 'p' (0x5617bb585c80 'p'Exception caught: cleaning lookahead and stack -0x5617bb585c80->Object::~Object { 0x5617bb585b40, 0x5617bb585b90, 0x5617bb585be0, 0x5617bb585c30, 0x5617bb585c80 } -0x5617bb585c30->Object::~Object { 0x5617bb585b40, 0x5617bb585b90, 0x5617bb585be0, 0x5617bb585c30 } -0x5617bb585be0->Object::~Object { 0x5617bb585b40, 0x5617bb585b90, 0x5617bb585be0 } -0x5617bb585b90->Object::~Object { 0x5617bb585b40, 0x5617bb585b90 } -0x5617bb585b40->Object::~Object { 0x5617bb585b40 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x5617bb585b40->Object::Object { } -Next token is token 'a' (0x5617bb585b40 'a') -Shifting token 'a' (0x5617bb585b40 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5617bb585b40 'a') --> $$ = nterm item (0x5617bb585b40 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x5617bb585b90->Object::Object { 0x5617bb585b40 } -Next token is token 'a' (0x5617bb585b90 'a') -Shifting token 'a' (0x5617bb585b90 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5617bb585b90 'a') --> $$ = nterm item (0x5617bb585b90 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x5617bb585be0->Object::Object { 0x5617bb585b40, 0x5617bb585b90 } -Next token is token 'a' (0x5617bb585be0 'a') -Shifting token 'a' (0x5617bb585be0 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5617bb585be0 'a') --> $$ = nterm item (0x5617bb585be0 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x5617bb585c30->Object::Object { 0x5617bb585b40, 0x5617bb585b90, 0x5617bb585be0 } -Next token is token 'a' (0x5617bb585c30 'a') -Shifting token 'a' (0x5617bb585c30 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5617bb585c30 'a') --> $$ = nterm item (0x5617bb585c30 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x5617bb585c80->Object::Object { 0x5617bb585b40, 0x5617bb585b90, 0x5617bb585be0, 0x5617bb585c30 } -Next token is token 'p' (0x5617bb585c80 'p'Exception caught: cleaning lookahead and stack -0x5617bb585c80->Object::~Object { 0x5617bb585b40, 0x5617bb585b90, 0x5617bb585be0, 0x5617bb585c30, 0x5617bb585c80 } -0x5617bb585c30->Object::~Object { 0x5617bb585b40, 0x5617bb585b90, 0x5617bb585be0, 0x5617bb585c30 } 
-0x5617bb585be0->Object::~Object { 0x5617bb585b40, 0x5617bb585b90, 0x5617bb585be0 } -0x5617bb585b90->Object::~Object { 0x5617bb585b40, 0x5617bb585b90 } -0x5617bb585b40->Object::~Object { 0x5617bb585b40 } -exception caught: printer -end { } -stdout: -./c++.at:1360: grep '^exception caught: printer$' stderr -stdout: -./c++.at:1065: $PREPARSER ./input < in -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -stderr: -stderr: -error: invalid expression -caught error -error: invalid character -caught error -exception caught: syntax error -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -./c++.at:1360: $PREPARSER ./input aaaaT -stderr: -error: invalid expression -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -======== Testing with C++ standard flags: '' -./c++.at:851: $PREPARSER ./input -./c++.at:1065: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stderr: -./c++.at:851: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -stderr: -stdout: -./glr-regression.at:205: $PREPARSER ./glr-regr1 BPBPB -./cxx-type.at:458: $PREPARSER ./types test-input -stderr: -stderr: -syntax error, unexpected ID, expecting '=' or '+' or ')' -./glr-regression.at:205: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./cxx-type.at:458: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -714. glr-regression.at:205: ok ./cxx-type.at:458: $PREPARSER ./types -p test-input +712. cxx-type.at:449: ok stderr: Starting parse Entering state 0 @@ -265615,363 +265763,159 @@ $1 = token '@' () Cleanup: popping nterm prog () 713. cxx-type.at:455: ok - -718. glr-regression.at:355: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.cc ... -./glr-regression.at:355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y -719. glr-regression.at:356: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr2.cc ... 
-./glr-regression.at:356: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y -./glr-regression.at:355: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS -./glr-regression.at:356: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS -stderr: -stdout: -./glr-regression.at:354: $PREPARSER ./glr-regr2a input1.txt -stderr: -stderr: -./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1064: $PREPARSER ./input < in -stderr: -./glr-regression.at:354: $PREPARSER ./glr-regr2a input2.txt -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: -stderr: -stdout: -stdout: -./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaas -./c++.at:1555: $PREPARSER ./test -stderr: -./c++.at:1064: $PREPARSER ./input < in -stderr: -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:354: $PREPARSER ./glr-regr2a input3.txt -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -./c++.at:1361: $PREPARSER ./input aaaal -stderr: -stderr: -error: invalid expression -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input i -./c++.at:1064: $PREPARSER ./input < in -stderr: -exception caught: initial-action -717. glr-regression.at:354: ok -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: invalid character -./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap - -stderr: -682. 
c++.at:1064: ok -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x55fb45963b40->Object::Object { } -Next token is token 'a' (0x55fb45963b40 'a') -Shifting token 'a' (0x55fb45963b40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fb45963b40 'a') --> $$ = nterm item (0x55fb45963b40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x55fb45963b90->Object::Object { 0x55fb45963b40 } -Next token is token 'a' (0x55fb45963b90 'a') -Shifting token 'a' (0x55fb45963b90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fb45963b90 'a') --> $$ = nterm item (0x55fb45963b90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x55fb45963be0->Object::Object { 0x55fb45963b40, 0x55fb45963b90 } -Next token is token 'a' (0x55fb45963be0 'a') -Shifting token 'a' (0x55fb45963be0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fb45963be0 'a') --> $$ = nterm item (0x55fb45963be0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x55fb45963c30->Object::Object { 0x55fb45963b40, 0x55fb45963b90, 0x55fb45963be0 } -Next token is token 'a' (0x55fb45963c30 'a') -Shifting token 'a' (0x55fb45963c30 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fb45963c30 'a') --> $$ = nterm item (0x55fb45963c30 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x55fb45963c80->Object::Object { 0x55fb45963b40, 0x55fb45963b90, 0x55fb45963be0, 0x55fb45963c30 } -Next token is token 'p' (0x55fb45963c80 'p'Exception caught: cleaning lookahead and stack -0x55fb45963c80->Object::~Object { 0x55fb45963b40, 0x55fb45963b90, 0x55fb45963be0, 0x55fb45963c30, 0x55fb45963c80 } -0x55fb45963c30->Object::~Object { 0x55fb45963b40, 0x55fb45963b90, 0x55fb45963be0, 0x55fb45963c30 } -0x55fb45963be0->Object::~Object { 0x55fb45963b40, 0x55fb45963b90, 0x55fb45963be0 } -0x55fb45963b90->Object::~Object { 0x55fb45963b40, 0x55fb45963b90 } -0x55fb45963b40->Object::~Object { 0x55fb45963b40 } -exception caught: printer -end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x55fb45963b40->Object::Object { } -Next token is token 'a' (0x55fb45963b40 'a') -Shifting token 'a' (0x55fb45963b40 'a') -Entering state 1 -Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fb45963b40 'a') --> $$ = nterm item (0x55fb45963b40 'a') -Entering state 10 -Stack now 0 10 -Reading a token -0x55fb45963b90->Object::Object { 0x55fb45963b40 } -Next token is token 'a' (0x55fb45963b90 'a') -Shifting token 'a' (0x55fb45963b90 'a') -Entering state 1 -Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fb45963b90 'a') --> $$ = nterm item (0x55fb45963b90 'a') -Entering state 10 -Stack now 0 10 10 -Reading a token -0x55fb45963be0->Object::Object { 0x55fb45963b40, 0x55fb45963b90 } -Next token is token 'a' (0x55fb45963be0 'a') -Shifting token 'a' (0x55fb45963be0 'a') -Entering state 1 -Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fb45963be0 'a') --> $$ = nterm item (0x55fb45963be0 'a') -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x55fb45963c30->Object::Object { 0x55fb45963b40, 
0x55fb45963b90, 0x55fb45963be0 } -Next token is token 'a' (0x55fb45963c30 'a') -Shifting token 'a' (0x55fb45963c30 'a') -Entering state 1 -Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fb45963c30 'a') --> $$ = nterm item (0x55fb45963c30 'a') -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x55fb45963c80->Object::Object { 0x55fb45963b40, 0x55fb45963b90, 0x55fb45963be0, 0x55fb45963c30 } -Next token is token 'p' (0x55fb45963c80 'p'Exception caught: cleaning lookahead and stack -0x55fb45963c80->Object::~Object { 0x55fb45963b40, 0x55fb45963b90, 0x55fb45963be0, 0x55fb45963c30, 0x55fb45963c80 } -0x55fb45963c30->Object::~Object { 0x55fb45963b40, 0x55fb45963b90, 0x55fb45963be0, 0x55fb45963c30 } -0x55fb45963be0->Object::~Object { 0x55fb45963b40, 0x55fb45963b90, 0x55fb45963be0 } -0x55fb45963b90->Object::~Object { 0x55fb45963b40, 0x55fb45963b90 } -0x55fb45963b40->Object::~Object { 0x55fb45963b40 } -exception caught: printer -end { } -./c++.at:1555: ./check -./c++.at:1361: grep '^exception caught: printer$' stderr -stdout: -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y -exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaae -720. glr-regression.at:488: testing Improper merging of GLR delayed action sets: glr.c ... -stderr: -exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.c glr-regr3.y -./c++.at:1361: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:1361: $PREPARSER ./input aaaaT ./c++.at:1362: $PREPARSER ./input aaaas stderr: -stderr: exception caught: reduction ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR -721. glr-regression.at:489: testing Improper merging of GLR delayed action sets: glr.cc ... +716. glr-regression.at:207: testing Badly Collapsed GLR States: glr2.cc ... +./glr-regression.at:207: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o glr-regr1.cc glr-regr1.y +stderr: +stdout: ./c++.at:1362: $PREPARSER ./input aaaal -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./glr-regression.at:205: $PREPARSER ./glr-regr1 BPBPB stderr: -./glr-regression.at:489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y stderr: -./glr-regression.at:488: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr3 glr-regr3.c $LIBS exception caught: yylex ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: +./glr-regression.at:205: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input i -./c++.at:857: $PREPARSER ./input -======== Testing with C++ standard flags: '' -stderr: -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +714. 
glr-regression.at:205: ok stderr: +./glr-regression.at:207: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr1 glr-regr1.cc $LIBS exception caught: initial-action ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1362: $PREPARSER ./input aaaap + stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +717. glr-regression.at:354: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.c ... +./glr-regression.at:354: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.c glr-regr2a.y ./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffcb921384f->Object::Object { } -0x7ffcb92138f0->Object::Object { 0x7ffcb921384f } -0x7ffcb921384f->Object::~Object { 0x7ffcb921384f, 0x7ffcb92138f0 } -Next token is token 'a' (0x7ffcb92138f0 'a') -0x7ffcb9213860->Object::Object { 0x7ffcb92138f0 } -0x7ffcb9213817->Object::Object { 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::~Object { 0x7ffcb9213817, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb92138f0->Object::~Object { 0x7ffcb9213860, 0x7ffcb92138f0 } -Shifting token 'a' (0x7ffcb9213860 'a') -0x55f61498eee0->Object::Object { 0x7ffcb9213860 } -0x7ffcb92137ef->Object::Object { 0x55f61498eee0, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::~Object { 0x55f61498eee0, 0x7ffcb92137ef, 0x7ffcb9213860 } -0x7ffcb9213860->Object::~Object { 0x55f61498eee0, 0x7ffcb9213860 } +0x7ffc1d59c8cf->Object::Object { } +0x7ffc1d59c970->Object::Object { 0x7ffc1d59c8cf } +0x7ffc1d59c8cf->Object::~Object { 0x7ffc1d59c8cf, 0x7ffc1d59c970 } +Next token is token 'a' (0x7ffc1d59c970 'a') +0x7ffc1d59c8e0->Object::Object { 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::Object { 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::~Object { 0x7ffc1d59c897, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c970->Object::~Object { 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +Shifting token 'a' (0x7ffc1d59c8e0 'a') +0x55e57c91aee0->Object::Object { 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c86f, 0x7ffc1d59c8e0 } +0x7ffc1d59c8e0->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c8e0 } Entering state 2 Stack now 0 2 -0x7ffcb9213910->Object::Object { 0x55f61498eee0 } +0x7ffc1d59c990->Object::Object { 0x55e57c91aee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f61498eee0 'a') --> $$ = nterm item (0x7ffcb9213910 'a') -0x55f61498eee0->Object::~Object { 0x55f61498eee0, 0x7ffcb9213910 } -0x55f61498eee0->Object::Object { 0x7ffcb9213910 } -0x7ffcb92138c8->Object::Object { 0x55f61498eee0, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::~Object { 0x55f61498eee0, 0x7ffcb92138c8, 0x7ffcb9213910 } -0x7ffcb9213910->Object::~Object { 0x55f61498eee0, 0x7ffcb9213910 } + $1 = token 'a' (0x55e57c91aee0 'a') +-> $$ = nterm item (0x7ffc1d59c990 'a') +0x55e57c91aee0->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c990 } +0x55e57c91aee0->Object::Object { 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c948, 0x7ffc1d59c990 } +0x7ffc1d59c990->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c990 } 
Entering state 11 Stack now 0 11 Reading a token -0x7ffcb921384f->Object::Object { 0x55f61498eee0 } -0x7ffcb92138f0->Object::Object { 0x55f61498eee0, 0x7ffcb921384f } -0x7ffcb921384f->Object::~Object { 0x55f61498eee0, 0x7ffcb921384f, 0x7ffcb92138f0 } -Next token is token 'a' (0x7ffcb92138f0 'a') -0x7ffcb9213860->Object::Object { 0x55f61498eee0, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::Object { 0x55f61498eee0, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::~Object { 0x55f61498eee0, 0x7ffcb9213817, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb92138f0->Object::~Object { 0x55f61498eee0, 0x7ffcb9213860, 0x7ffcb92138f0 } -Shifting token 'a' (0x7ffcb9213860 'a') -0x55f61498ef00->Object::Object { 0x55f61498eee0, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb92137ef, 0x7ffcb9213860 } -0x7ffcb9213860->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213860 } +0x7ffc1d59c8cf->Object::Object { 0x55e57c91aee0 } +0x7ffc1d59c970->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c8cf } +0x7ffc1d59c8cf->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c8cf, 0x7ffc1d59c970 } +Next token is token 'a' (0x7ffc1d59c970 'a') +0x7ffc1d59c8e0->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c897, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c970->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +Shifting token 'a' (0x7ffc1d59c8e0 'a') +0x55e57c91af00->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c86f, 0x7ffc1d59c8e0 } +0x7ffc1d59c8e0->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8e0 } Entering state 2 Stack now 0 11 2 -0x7ffcb9213910->Object::Object { 0x55f61498eee0, 0x55f61498ef00 } +0x7ffc1d59c990->Object::Object { 0x55e57c91aee0, 0x55e57c91af00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f61498ef00 'a') --> $$ = nterm item (0x7ffcb9213910 'a') -0x55f61498ef00->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213910 } -0x55f61498ef00->Object::Object { 0x55f61498eee0, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb92138c8, 0x7ffcb9213910 } -0x7ffcb9213910->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213910 } + $1 = token 'a' (0x55e57c91af00 'a') +-> $$ = nterm item (0x7ffc1d59c990 'a') +0x55e57c91af00->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c990 } +0x55e57c91af00->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c948, 0x7ffc1d59c990 } +0x7ffc1d59c990->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c990 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffcb921384f->Object::Object { 0x55f61498eee0, 0x55f61498ef00 } -0x7ffcb92138f0->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb921384f } -0x7ffcb921384f->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb921384f, 0x7ffcb92138f0 } -Next token is token 'a' (0x7ffcb92138f0 'a') -0x7ffcb9213860->Object::Object { 
0x55f61498eee0, 0x55f61498ef00, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213817, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb92138f0->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213860, 0x7ffcb92138f0 } -Shifting token 'a' (0x7ffcb9213860 'a') -0x55f61498ef20->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb92137ef, 0x7ffcb9213860 } -0x7ffcb9213860->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213860 } +0x7ffc1d59c8cf->Object::Object { 0x55e57c91aee0, 0x55e57c91af00 } +0x7ffc1d59c970->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8cf } +0x7ffc1d59c8cf->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8cf, 0x7ffc1d59c970 } +Next token is token 'a' (0x7ffc1d59c970 'a') +0x7ffc1d59c8e0->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c897, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c970->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +Shifting token 'a' (0x7ffc1d59c8e0 'a') +0x55e57c91af20->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c86f, 0x7ffc1d59c8e0 } +0x7ffc1d59c8e0->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8e0 } Entering state 2 Stack now 0 11 11 2 -0x7ffcb9213910->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20 } +0x7ffc1d59c990->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f61498ef20 'a') --> $$ = nterm item (0x7ffcb9213910 'a') -0x55f61498ef20->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213910 } -0x55f61498ef20->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb92138c8, 0x7ffcb9213910 } -0x7ffcb9213910->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213910 } + $1 = token 'a' (0x55e57c91af20 'a') +-> $$ = nterm item (0x7ffc1d59c990 'a') +0x55e57c91af20->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c990 } +0x55e57c91af20->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c948, 0x7ffc1d59c990 } +0x7ffc1d59c990->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c990 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffcb921384f->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20 } -0x7ffcb92138f0->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 
0x7ffcb921384f } -0x7ffcb921384f->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb921384f, 0x7ffcb92138f0 } -Next token is token 'a' (0x7ffcb92138f0 'a') -0x7ffcb9213860->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213817, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb92138f0->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213860, 0x7ffcb92138f0 } -Shifting token 'a' (0x7ffcb9213860 'a') -0x55f61498ef40->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb92137ef, 0x7ffcb9213860 } -0x7ffcb9213860->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb9213860 } +0x7ffc1d59c8cf->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20 } +0x7ffc1d59c970->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8cf } +0x7ffc1d59c8cf->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8cf, 0x7ffc1d59c970 } +Next token is token 'a' (0x7ffc1d59c970 'a') +0x7ffc1d59c8e0->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c897, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c970->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +Shifting token 'a' (0x7ffc1d59c8e0 'a') +0x55e57c91af40->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c86f, 0x7ffc1d59c8e0 } +0x7ffc1d59c8e0->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c8e0 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffcb9213910->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40 } +0x7ffc1d59c990->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f61498ef40 'a') --> $$ = nterm item (0x7ffcb9213910 'a') -0x55f61498ef40->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb9213910 } -0x55f61498ef40->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb92138c8, 0x7ffcb9213910 } -0x7ffcb9213910->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb9213910 } + $1 = token 'a' (0x55e57c91af40 'a') +-> $$ = nterm item (0x7ffc1d59c990 'a') +0x55e57c91af40->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 
0x55e57c91af40, 0x7ffc1d59c990 } +0x55e57c91af40->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c948, 0x7ffc1d59c990 } +0x7ffc1d59c990->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c990 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffcb921384f->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40 } -0x7ffcb92138f0->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb921384f } -0x7ffcb921384f->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb921384f, 0x7ffcb92138f0 } -Next token is token 'p' (0x7ffcb92138f0 'p'Exception caught: cleaning lookahead and stack -0x55f61498ef40->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb92138f0 } -0x55f61498ef20->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb92138f0 } -0x55f61498ef00->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb92138f0 } -0x55f61498eee0->Object::~Object { 0x55f61498eee0, 0x7ffcb92138f0 } -0x7ffcb92138f0->Object::~Object { 0x7ffcb92138f0 } +0x7ffc1d59c8cf->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40 } +0x7ffc1d59c970->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c8cf } +0x7ffc1d59c8cf->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c8cf, 0x7ffc1d59c970 } +Next token is token 'p' (0x7ffc1d59c970 'p'Exception caught: cleaning lookahead and stack +0x55e57c91af40->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c970 } +0x55e57c91af20->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c970 } +0x55e57c91af00->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c970 } +0x55e57c91aee0->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c970 } +0x7ffc1d59c970->Object::~Object { 0x7ffc1d59c970 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -265980,481 +265924,160 @@ Entering state 0 Stack now 0 Reading a token -0x7ffcb921384f->Object::Object { } -0x7ffcb92138f0->Object::Object { 0x7ffcb921384f } -0x7ffcb921384f->Object::~Object { 0x7ffcb921384f, 0x7ffcb92138f0 } -Next token is token 'a' (0x7ffcb92138f0 'a') -0x7ffcb9213860->Object::Object { 0x7ffcb92138f0 } -0x7ffcb9213817->Object::Object { 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::~Object { 0x7ffcb9213817, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb92138f0->Object::~Object { 0x7ffcb9213860, 0x7ffcb92138f0 } -Shifting token 'a' (0x7ffcb9213860 'a') -0x55f61498eee0->Object::Object { 0x7ffcb9213860 } -0x7ffcb92137ef->Object::Object { 0x55f61498eee0, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::~Object { 0x55f61498eee0, 0x7ffcb92137ef, 0x7ffcb9213860 } -0x7ffcb9213860->Object::~Object { 0x55f61498eee0, 0x7ffcb9213860 } +0x7ffc1d59c8cf->Object::Object { } +0x7ffc1d59c970->Object::Object { 0x7ffc1d59c8cf } +0x7ffc1d59c8cf->Object::~Object { 0x7ffc1d59c8cf, 0x7ffc1d59c970 } +Next token is token 'a' (0x7ffc1d59c970 'a') +0x7ffc1d59c8e0->Object::Object { 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::Object { 0x7ffc1d59c8e0, 
0x7ffc1d59c970 } +0x7ffc1d59c897->Object::~Object { 0x7ffc1d59c897, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c970->Object::~Object { 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +Shifting token 'a' (0x7ffc1d59c8e0 'a') +0x55e57c91aee0->Object::Object { 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c86f, 0x7ffc1d59c8e0 } +0x7ffc1d59c8e0->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c8e0 } Entering state 2 Stack now 0 2 -0x7ffcb9213910->Object::Object { 0x55f61498eee0 } +0x7ffc1d59c990->Object::Object { 0x55e57c91aee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f61498eee0 'a') --> $$ = nterm item (0x7ffcb9213910 'a') -0x55f61498eee0->Object::~Object { 0x55f61498eee0, 0x7ffcb9213910 } -0x55f61498eee0->Object::Object { 0x7ffcb9213910 } -0x7ffcb92138c8->Object::Object { 0x55f61498eee0, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::~Object { 0x55f61498eee0, 0x7ffcb92138c8, 0x7ffcb9213910 } -0x7ffcb9213910->Object::~Object { 0x55f61498eee0, 0x7ffcb9213910 } + $1 = token 'a' (0x55e57c91aee0 'a') +-> $$ = nterm item (0x7ffc1d59c990 'a') +0x55e57c91aee0->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c990 } +0x55e57c91aee0->Object::Object { 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c948, 0x7ffc1d59c990 } +0x7ffc1d59c990->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c990 } Entering state 11 Stack now 0 11 Reading a token -0x7ffcb921384f->Object::Object { 0x55f61498eee0 } -0x7ffcb92138f0->Object::Object { 0x55f61498eee0, 0x7ffcb921384f } -0x7ffcb921384f->Object::~Object { 0x55f61498eee0, 0x7ffcb921384f, 0x7ffcb92138f0 } -Next token is token 'a' (0x7ffcb92138f0 'a') -0x7ffcb9213860->Object::Object { 0x55f61498eee0, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::Object { 0x55f61498eee0, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::~Object { 0x55f61498eee0, 0x7ffcb9213817, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb92138f0->Object::~Object { 0x55f61498eee0, 0x7ffcb9213860, 0x7ffcb92138f0 } -Shifting token 'a' (0x7ffcb9213860 'a') -0x55f61498ef00->Object::Object { 0x55f61498eee0, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb92137ef, 0x7ffcb9213860 } -0x7ffcb9213860->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213860 } +0x7ffc1d59c8cf->Object::Object { 0x55e57c91aee0 } +0x7ffc1d59c970->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c8cf } +0x7ffc1d59c8cf->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c8cf, 0x7ffc1d59c970 } +Next token is token 'a' (0x7ffc1d59c970 'a') +0x7ffc1d59c8e0->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c897, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c970->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +Shifting token 'a' (0x7ffc1d59c8e0 'a') +0x55e57c91af00->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c86f, 0x7ffc1d59c8e0 } +0x7ffc1d59c8e0->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8e0 } Entering state 2 Stack now 0 11 2 -0x7ffcb9213910->Object::Object { 0x55f61498eee0, 0x55f61498ef00 } 
+0x7ffc1d59c990->Object::Object { 0x55e57c91aee0, 0x55e57c91af00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f61498ef00 'a') --> $$ = nterm item (0x7ffcb9213910 'a') -0x55f61498ef00->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213910 } -0x55f61498ef00->Object::Object { 0x55f61498eee0, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb92138c8, 0x7ffcb9213910 } -0x7ffcb9213910->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213910 } + $1 = token 'a' (0x55e57c91af00 'a') +-> $$ = nterm item (0x7ffc1d59c990 'a') +0x55e57c91af00->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c990 } +0x55e57c91af00->Object::Object { 0x55e57c91aee0, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c948, 0x7ffc1d59c990 } +0x7ffc1d59c990->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c990 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffcb921384f->Object::Object { 0x55f61498eee0, 0x55f61498ef00 } -0x7ffcb92138f0->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb921384f } -0x7ffcb921384f->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb921384f, 0x7ffcb92138f0 } -Next token is token 'a' (0x7ffcb92138f0 'a') -0x7ffcb9213860->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213817, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb92138f0->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213860, 0x7ffcb92138f0 } -Shifting token 'a' (0x7ffcb9213860 'a') -0x55f61498ef20->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb92137ef, 0x7ffcb9213860 } -0x7ffcb9213860->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213860 } +0x7ffc1d59c8cf->Object::Object { 0x55e57c91aee0, 0x55e57c91af00 } +0x7ffc1d59c970->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8cf } +0x7ffc1d59c8cf->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8cf, 0x7ffc1d59c970 } +Next token is token 'a' (0x7ffc1d59c970 'a') +0x7ffc1d59c8e0->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c897, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c970->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +Shifting token 'a' (0x7ffc1d59c8e0 'a') +0x55e57c91af20->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c86f, 0x7ffc1d59c8e0 } +0x7ffc1d59c8e0->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8e0 } Entering state 2 Stack now 0 11 11 2 -0x7ffcb9213910->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20 } 
+0x7ffc1d59c990->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f61498ef20 'a') --> $$ = nterm item (0x7ffcb9213910 'a') -0x55f61498ef20->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213910 } -0x55f61498ef20->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb92138c8, 0x7ffcb9213910 } -0x7ffcb9213910->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213910 } + $1 = token 'a' (0x55e57c91af20 'a') +-> $$ = nterm item (0x7ffc1d59c990 'a') +0x55e57c91af20->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c990 } +0x55e57c91af20->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c948, 0x7ffc1d59c990 } +0x7ffc1d59c990->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c990 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffcb921384f->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20 } -0x7ffcb92138f0->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb921384f } -0x7ffcb921384f->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb921384f, 0x7ffcb92138f0 } -Next token is token 'a' (0x7ffcb92138f0 'a') -0x7ffcb9213860->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb9213817->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213817, 0x7ffcb9213860, 0x7ffcb92138f0 } -0x7ffcb92138f0->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213860, 0x7ffcb92138f0 } -Shifting token 'a' (0x7ffcb9213860 'a') -0x55f61498ef40->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb9213860 } -0x7ffcb92137ef->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb92137ef, 0x7ffcb9213860 } -0x7ffcb9213860->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb9213860 } +0x7ffc1d59c8cf->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20 } +0x7ffc1d59c970->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8cf } +0x7ffc1d59c8cf->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8cf, 0x7ffc1d59c970 } +Next token is token 'a' (0x7ffc1d59c970 'a') +0x7ffc1d59c8e0->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c897->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c897, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +0x7ffc1d59c970->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c8e0, 0x7ffc1d59c970 } +Shifting token 'a' (0x7ffc1d59c8e0 'a') +0x55e57c91af40->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 
0x55e57c91af20, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c8e0 } +0x7ffc1d59c86f->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c86f, 0x7ffc1d59c8e0 } +0x7ffc1d59c8e0->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c8e0 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffcb9213910->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40 } +0x7ffc1d59c990->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55f61498ef40 'a') --> $$ = nterm item (0x7ffcb9213910 'a') -0x55f61498ef40->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb9213910 } -0x55f61498ef40->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb9213910 } -0x7ffcb92138c8->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb92138c8, 0x7ffcb9213910 } -0x7ffcb9213910->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb9213910 } + $1 = token 'a' (0x55e57c91af40 'a') +-> $$ = nterm item (0x7ffc1d59c990 'a') +0x55e57c91af40->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c990 } +0x55e57c91af40->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c990 } +0x7ffc1d59c948->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c948, 0x7ffc1d59c990 } +0x7ffc1d59c990->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c990 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffcb921384f->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40 } -0x7ffcb92138f0->Object::Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb921384f } -0x7ffcb921384f->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb921384f, 0x7ffcb92138f0 } -Next token is token 'p' (0x7ffcb92138f0 'p'Exception caught: cleaning lookahead and stack -0x55f61498ef40->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x55f61498ef40, 0x7ffcb92138f0 } -0x55f61498ef20->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x55f61498ef20, 0x7ffcb92138f0 } -0x55f61498ef00->Object::~Object { 0x55f61498eee0, 0x55f61498ef00, 0x7ffcb92138f0 } -0x55f61498eee0->Object::~Object { 0x55f61498eee0, 0x7ffcb92138f0 } -0x7ffcb92138f0->Object::~Object { 0x7ffcb92138f0 } +0x7ffc1d59c8cf->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40 } +0x7ffc1d59c970->Object::Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c8cf } +0x7ffc1d59c8cf->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c8cf, 0x7ffc1d59c970 } +Next token is token 'p' (0x7ffc1d59c970 'p'Exception caught: cleaning lookahead and stack +0x55e57c91af40->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x55e57c91af40, 0x7ffc1d59c970 } +0x55e57c91af20->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x55e57c91af20, 0x7ffc1d59c970 } 
+0x55e57c91af00->Object::~Object { 0x55e57c91aee0, 0x55e57c91af00, 0x7ffc1d59c970 } +0x55e57c91aee0->Object::~Object { 0x55e57c91aee0, 0x7ffc1d59c970 } +0x7ffc1d59c970->Object::~Object { 0x7ffc1d59c970 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr -./glr-regression.at:489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS stdout: exception caught: printer ./c++.at:1362: $PREPARSER ./input aaaae stderr: -stderr: exception caught: syntax error ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./c++.at:1363: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:354: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.c $LIBS +718. glr-regression.at:355: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr.cc ... +./glr-regression.at:355: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y ./c++.at:1362: $PREPARSER ./input aaaaE -./c++.at:1363: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaT stderr: -./c++.at:1363: $PREPARSER ./input aaaap ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap -stderr: ./c++.at:1362: $PREPARSER ./input aaaaR -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffd8be9014f->Object::Object { } -0x7ffd8be90210->Object::Object { 0x7ffd8be9014f } -0x7ffd8be9014f->Object::~Object { 0x7ffd8be9014f, 0x7ffd8be90210 } -Next token is token 'a' (0x7ffd8be90210 'a') -0x7ffd8be90180->Object::Object { 0x7ffd8be90210 } -0x7ffd8be90137->Object::Object { 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90137->Object::~Object { 0x7ffd8be90137, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90210->Object::~Object { 0x7ffd8be90180, 0x7ffd8be90210 } -Shifting token 'a' (0x7ffd8be90180 'a') -0x55d6c9560ee0->Object::Object { 0x7ffd8be90180 } -0x7ffd8be9010f->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be9010f, 0x7ffd8be90180 } -0x7ffd8be90180->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90180 } -Entering state 1 -Stack now 0 1 -0x7ffd8be90230->Object::Object { 0x55d6c9560ee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d6c9560ee0 'a') --> $$ = nterm item (0x7ffd8be90230 'a') -0x55d6c9560ee0->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90230 } -0x55d6c9560ee0->Object::Object { 0x7ffd8be90230 } -0x7ffd8be901e8->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be901e8, 0x7ffd8be90230 } -0x7ffd8be90230->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90230 } -Entering state 10 -Stack now 0 10 -Reading a token -0x7ffd8be9014f->Object::Object { 0x55d6c9560ee0 } -0x7ffd8be90210->Object::Object { 0x55d6c9560ee0, 
0x7ffd8be9014f } -0x7ffd8be9014f->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be9014f, 0x7ffd8be90210 } -Next token is token 'a' (0x7ffd8be90210 'a') -0x7ffd8be90180->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90210 } -0x7ffd8be90137->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90137->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90137, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90210->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90180, 0x7ffd8be90210 } -Shifting token 'a' (0x7ffd8be90180 'a') -0x55d6c9560f00->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be9010f, 0x7ffd8be90180 } -0x7ffd8be90180->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90180 } -Entering state 1 -Stack now 0 10 1 -0x7ffd8be90230->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d6c9560f00 'a') --> $$ = nterm item (0x7ffd8be90230 'a') -0x55d6c9560f00->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90230 } -0x55d6c9560f00->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be901e8, 0x7ffd8be90230 } -0x7ffd8be90230->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90230 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0x7ffd8be9014f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00 } -0x7ffd8be90210->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be9014f } -0x7ffd8be9014f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be9014f, 0x7ffd8be90210 } -Next token is token 'a' (0x7ffd8be90210 'a') -0x7ffd8be90180->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90210 } -0x7ffd8be90137->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90137->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90137, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90210->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90180, 0x7ffd8be90210 } -Shifting token 'a' (0x7ffd8be90180 'a') -0x55d6c9560f20->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be9010f, 0x7ffd8be90180 } -0x7ffd8be90180->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90180 } -Entering state 1 -Stack now 0 10 10 1 -0x7ffd8be90230->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d6c9560f20 'a') --> $$ = nterm item (0x7ffd8be90230 'a') -0x55d6c9560f20->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90230 } -0x55d6c9560f20->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be901e8, 0x7ffd8be90230 } -0x7ffd8be90230->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90230 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x7ffd8be9014f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 
0x55d6c9560f20 } -0x7ffd8be90210->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be9014f } -0x7ffd8be9014f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be9014f, 0x7ffd8be90210 } -Next token is token 'a' (0x7ffd8be90210 'a') -0x7ffd8be90180->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90210 } -0x7ffd8be90137->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90137->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90137, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90210->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90180, 0x7ffd8be90210 } -Shifting token 'a' (0x7ffd8be90180 'a') -0x55d6c9560f40->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be9010f, 0x7ffd8be90180 } -0x7ffd8be90180->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90180 } -Entering state 1 -Stack now 0 10 10 10 1 -0x7ffd8be90230->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d6c9560f40 'a') --> $$ = nterm item (0x7ffd8be90230 'a') -0x55d6c9560f40->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90230 } -0x55d6c9560f40->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be901e8, 0x7ffd8be90230 } -0x7ffd8be90230->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90230 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x7ffd8be9014f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40 } -0x7ffd8be90210->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be9014f } -0x7ffd8be9014f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be9014f, 0x7ffd8be90210 } -Next token is token 'p' (0x7ffd8be90210 'p'Exception caught: cleaning lookahead and stack -0x55d6c9560f40->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90210 } -0x55d6c9560f20->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90210 } -0x55d6c9560f00->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90210 } -0x55d6c9560ee0->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90210 } -0x7ffd8be90210->Object::~Object { 0x7ffd8be90210 } -exception caught: printer -end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x7ffd8be9014f->Object::Object { } -0x7ffd8be90210->Object::Object { 0x7ffd8be9014f } -0x7ffd8be9014f->Object::~Object { 0x7ffd8be9014f, 0x7ffd8be90210 } -Next token is token 'a' (0x7ffd8be90210 'a') -0x7ffd8be90180->Object::Object { 0x7ffd8be90210 } 
-0x7ffd8be90137->Object::Object { 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90137->Object::~Object { 0x7ffd8be90137, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90210->Object::~Object { 0x7ffd8be90180, 0x7ffd8be90210 } -Shifting token 'a' (0x7ffd8be90180 'a') -0x55d6c9560ee0->Object::Object { 0x7ffd8be90180 } -0x7ffd8be9010f->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be9010f, 0x7ffd8be90180 } -0x7ffd8be90180->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90180 } -Entering state 1 -Stack now 0 1 -0x7ffd8be90230->Object::Object { 0x55d6c9560ee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d6c9560ee0 'a') --> $$ = nterm item (0x7ffd8be90230 'a') -0x55d6c9560ee0->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90230 } -0x55d6c9560ee0->Object::Object { 0x7ffd8be90230 } -0x7ffd8be901e8->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be901e8, 0x7ffd8be90230 } -0x7ffd8be90230->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90230 } -Entering state 10 -Stack now 0 10 -Reading a token -0x7ffd8be9014f->Object::Object { 0x55d6c9560ee0 } -0x7ffd8be90210->Object::Object { 0x55d6c9560ee0, 0x7ffd8be9014f } -0x7ffd8be9014f->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be9014f, 0x7ffd8be90210 } -Next token is token 'a' (0x7ffd8be90210 'a') -0x7ffd8be90180->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90210 } -0x7ffd8be90137->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90137->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90137, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90210->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90180, 0x7ffd8be90210 } -Shifting token 'a' (0x7ffd8be90180 'a') -0x55d6c9560f00->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be9010f, 0x7ffd8be90180 } -0x7ffd8be90180->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90180 } -Entering state 1 -Stack now 0 10 1 -0x7ffd8be90230->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d6c9560f00 'a') --> $$ = nterm item (0x7ffd8be90230 'a') -0x55d6c9560f00->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90230 } -0x55d6c9560f00->Object::Object { 0x55d6c9560ee0, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be901e8, 0x7ffd8be90230 } -0x7ffd8be90230->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90230 } -Entering state 10 -Stack now 0 10 10 -Reading a token -0x7ffd8be9014f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00 } -0x7ffd8be90210->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be9014f } -0x7ffd8be9014f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be9014f, 0x7ffd8be90210 } -Next token is token 'a' (0x7ffd8be90210 'a') -0x7ffd8be90180->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90210 } -0x7ffd8be90137->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90137->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90137, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90210->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90180, 0x7ffd8be90210 } -Shifting token 'a' (0x7ffd8be90180 'a') -0x55d6c9560f20->Object::Object { 
0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be9010f, 0x7ffd8be90180 } -0x7ffd8be90180->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90180 } -Entering state 1 -Stack now 0 10 10 1 -0x7ffd8be90230->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d6c9560f20 'a') --> $$ = nterm item (0x7ffd8be90230 'a') -0x55d6c9560f20->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90230 } -0x55d6c9560f20->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be901e8, 0x7ffd8be90230 } -0x7ffd8be90230->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90230 } -Entering state 10 -Stack now 0 10 10 10 -Reading a token -0x7ffd8be9014f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20 } -0x7ffd8be90210->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be9014f } -0x7ffd8be9014f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be9014f, 0x7ffd8be90210 } -Next token is token 'a' (0x7ffd8be90210 'a') -0x7ffd8be90180->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90210 } -0x7ffd8be90137->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90137->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90137, 0x7ffd8be90180, 0x7ffd8be90210 } -0x7ffd8be90210->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90180, 0x7ffd8be90210 } -Shifting token 'a' (0x7ffd8be90180 'a') -0x55d6c9560f40->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90180 } -0x7ffd8be9010f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be9010f, 0x7ffd8be90180 } -0x7ffd8be90180->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90180 } -Entering state 1 -Stack now 0 10 10 10 1 -0x7ffd8be90230->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d6c9560f40 'a') --> $$ = nterm item (0x7ffd8be90230 'a') -0x55d6c9560f40->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90230 } -0x55d6c9560f40->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90230 } -0x7ffd8be901e8->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be901e8, 0x7ffd8be90230 } -0x7ffd8be90230->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90230 } -Entering state 10 -Stack now 0 10 10 10 10 -Reading a token -0x7ffd8be9014f->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40 } -0x7ffd8be90210->Object::Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 
0x55d6c9560f40, 0x7ffd8be9014f } -0x7ffd8be9014f->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be9014f, 0x7ffd8be90210 } -Next token is token 'p' (0x7ffd8be90210 'p'Exception caught: cleaning lookahead and stack -0x55d6c9560f40->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x55d6c9560f40, 0x7ffd8be90210 } -0x55d6c9560f20->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x55d6c9560f20, 0x7ffd8be90210 } -0x55d6c9560f00->Object::~Object { 0x55d6c9560ee0, 0x55d6c9560f00, 0x7ffd8be90210 } -0x55d6c9560ee0->Object::~Object { 0x55d6c9560ee0, 0x7ffd8be90210 } -0x7ffd8be90210->Object::~Object { 0x7ffd8be90210 } -exception caught: printer -end { } -./c++.at:1363: grep '^exception caught: printer$' stderr -stdout: +./glr-regression.at:355: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS ======== Testing with C++ standard flags: '' -exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae ./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaT -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR +stdout: +./c++.at:856: $PREPARSER ./input stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:856: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: stdout: ======== Testing with C++ standard flags: '' ./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:851: $PREPARSER ./input -stderr: -./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -678. c++.at:848: ok -stderr: -stdout: -./glr-regression.at:488: $PREPARSER ./glr-regr3 input.txt - -stderr: -./glr-regression.at:488: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -720. glr-regression.at:488: ok -stderr: -stdout: -./glr-regression.at:206: $PREPARSER ./glr-regr1 BPBPB -stderr: -stderr: - -stdout: -./glr-regression.at:355: $PREPARSER ./glr-regr2a input1.txt -./glr-regression.at:206: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -722. glr-regression.at:490: testing Improper merging of GLR delayed action sets: glr2.cc ... -./glr-regression.at:490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y -715. glr-regression.at:206: ok -./glr-regression.at:355: $PREPARSER ./glr-regr2a input2.txt -stderr: -./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -723. glr-regression.at:592: testing Duplicate representation of merged trees: %union { char *ptr; } glr.c ... 
-./glr-regression.at:355: $PREPARSER ./glr-regr2a input3.txt - -./glr-regression.at:592: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y -stderr: -./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -718. glr-regression.at:355: ok -./glr-regression.at:592: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS -./glr-regression.at:490: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS - -724. glr-regression.at:593: testing Duplicate representation of merged trees: %union { char *ptr; } glr.cc ... -./glr-regression.at:593: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -725. glr-regression.at:594: testing Duplicate representation of merged trees: %union { char *ptr; } glr2.cc ... -./glr-regression.at:594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -./glr-regression.at:593: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -./glr-regression.at:594: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:1360: $PREPARSER ./input aaaas @@ -266478,57 +266101,57 @@ Entering state 0 Stack now 0 Reading a token -0x564580703b40->Object::Object { } -Next token is token 'a' (0x564580703b40 'a') -Shifting token 'a' (0x564580703b40 'a') +0x56445a0f3b40->Object::Object { } +Next token is token 'a' (0x56445a0f3b40 'a') +Shifting token 'a' (0x56445a0f3b40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564580703b40 'a') --> $$ = nterm item (0x564580703b40 'a') + $1 = token 'a' (0x56445a0f3b40 'a') +-> $$ = nterm item (0x56445a0f3b40 'a') Entering state 11 Stack now 0 11 Reading a token -0x564580703b90->Object::Object { 0x564580703b40 } -Next token is token 'a' (0x564580703b90 'a') -Shifting token 'a' (0x564580703b90 'a') +0x56445a0f3b90->Object::Object { 0x56445a0f3b40 } +Next token is token 'a' (0x56445a0f3b90 'a') +Shifting token 'a' (0x56445a0f3b90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564580703b90 'a') --> $$ = nterm item (0x564580703b90 'a') + $1 = token 'a' (0x56445a0f3b90 'a') +-> $$ = nterm item (0x56445a0f3b90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x564580703be0->Object::Object { 0x564580703b40, 0x564580703b90 } -Next token is token 'a' (0x564580703be0 'a') -Shifting token 'a' (0x564580703be0 'a') +0x56445a0f3be0->Object::Object { 0x56445a0f3b40, 0x56445a0f3b90 } +Next token is token 'a' (0x56445a0f3be0 'a') +Shifting token 'a' (0x56445a0f3be0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564580703be0 'a') --> $$ = nterm item (0x564580703be0 'a') + $1 = token 'a' (0x56445a0f3be0 'a') +-> $$ = nterm item (0x56445a0f3be0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x564580703c30->Object::Object { 0x564580703b40, 0x564580703b90, 0x564580703be0 } -Next token is token 'a' (0x564580703c30 'a') -Shifting token 'a' (0x564580703c30 'a') +0x56445a0f3c30->Object::Object { 0x56445a0f3b40, 0x56445a0f3b90, 0x56445a0f3be0 } +Next token is token 'a' (0x56445a0f3c30 'a') +Shifting token 
'a' (0x56445a0f3c30 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564580703c30 'a') --> $$ = nterm item (0x564580703c30 'a') + $1 = token 'a' (0x56445a0f3c30 'a') +-> $$ = nterm item (0x56445a0f3c30 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x564580703c80->Object::Object { 0x564580703b40, 0x564580703b90, 0x564580703be0, 0x564580703c30 } -Next token is token 'p' (0x564580703c80 'p'Exception caught: cleaning lookahead and stack -0x564580703c80->Object::~Object { 0x564580703b40, 0x564580703b90, 0x564580703be0, 0x564580703c30, 0x564580703c80 } -0x564580703c30->Object::~Object { 0x564580703b40, 0x564580703b90, 0x564580703be0, 0x564580703c30 } -0x564580703be0->Object::~Object { 0x564580703b40, 0x564580703b90, 0x564580703be0 } -0x564580703b90->Object::~Object { 0x564580703b40, 0x564580703b90 } -0x564580703b40->Object::~Object { 0x564580703b40 } +0x56445a0f3c80->Object::Object { 0x56445a0f3b40, 0x56445a0f3b90, 0x56445a0f3be0, 0x56445a0f3c30 } +Next token is token 'p' (0x56445a0f3c80 'p'Exception caught: cleaning lookahead and stack +0x56445a0f3c80->Object::~Object { 0x56445a0f3b40, 0x56445a0f3b90, 0x56445a0f3be0, 0x56445a0f3c30, 0x56445a0f3c80 } +0x56445a0f3c30->Object::~Object { 0x56445a0f3b40, 0x56445a0f3b90, 0x56445a0f3be0, 0x56445a0f3c30 } +0x56445a0f3be0->Object::~Object { 0x56445a0f3b40, 0x56445a0f3b90, 0x56445a0f3be0 } +0x56445a0f3b90->Object::~Object { 0x56445a0f3b40, 0x56445a0f3b90 } +0x56445a0f3b40->Object::~Object { 0x56445a0f3b40 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -266537,606 +266160,663 @@ Entering state 0 Stack now 0 Reading a token -0x564580703b40->Object::Object { } -Next token is token 'a' (0x564580703b40 'a') -Shifting token 'a' (0x564580703b40 'a') +0x56445a0f3b40->Object::Object { } +Next token is token 'a' (0x56445a0f3b40 'a') +Shifting token 'a' (0x56445a0f3b40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564580703b40 'a') --> $$ = nterm item (0x564580703b40 'a') + $1 = token 'a' (0x56445a0f3b40 'a') +-> $$ = nterm item (0x56445a0f3b40 'a') Entering state 11 Stack now 0 11 Reading a token -0x564580703b90->Object::Object { 0x564580703b40 } -Next token is token 'a' (0x564580703b90 'a') -Shifting token 'a' (0x564580703b90 'a') +0x56445a0f3b90->Object::Object { 0x56445a0f3b40 } +Next token is token 'a' (0x56445a0f3b90 'a') +Shifting token 'a' (0x56445a0f3b90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564580703b90 'a') --> $$ = nterm item (0x564580703b90 'a') + $1 = token 'a' (0x56445a0f3b90 'a') +-> $$ = nterm item (0x56445a0f3b90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x564580703be0->Object::Object { 0x564580703b40, 0x564580703b90 } -Next token is token 'a' (0x564580703be0 'a') -Shifting token 'a' (0x564580703be0 'a') +0x56445a0f3be0->Object::Object { 0x56445a0f3b40, 0x56445a0f3b90 } +Next token is token 'a' (0x56445a0f3be0 'a') +Shifting token 'a' (0x56445a0f3be0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564580703be0 'a') --> $$ = nterm item (0x564580703be0 'a') + $1 = token 'a' (0x56445a0f3be0 'a') +-> $$ = nterm item (0x56445a0f3be0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x564580703c30->Object::Object { 0x564580703b40, 0x564580703b90, 0x564580703be0 } -Next token is token 'a' (0x564580703c30 'a') -Shifting token 'a' 
(0x564580703c30 'a') +0x56445a0f3c30->Object::Object { 0x56445a0f3b40, 0x56445a0f3b90, 0x56445a0f3be0 } +Next token is token 'a' (0x56445a0f3c30 'a') +Shifting token 'a' (0x56445a0f3c30 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x564580703c30 'a') --> $$ = nterm item (0x564580703c30 'a') + $1 = token 'a' (0x56445a0f3c30 'a') +-> $$ = nterm item (0x56445a0f3c30 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x564580703c80->Object::Object { 0x564580703b40, 0x564580703b90, 0x564580703be0, 0x564580703c30 } -Next token is token 'p' (0x564580703c80 'p'Exception caught: cleaning lookahead and stack -0x564580703c80->Object::~Object { 0x564580703b40, 0x564580703b90, 0x564580703be0, 0x564580703c30, 0x564580703c80 } -0x564580703c30->Object::~Object { 0x564580703b40, 0x564580703b90, 0x564580703be0, 0x564580703c30 } -0x564580703be0->Object::~Object { 0x564580703b40, 0x564580703b90, 0x564580703be0 } -0x564580703b90->Object::~Object { 0x564580703b40, 0x564580703b90 } -0x564580703b40->Object::~Object { 0x564580703b40 } +0x56445a0f3c80->Object::Object { 0x56445a0f3b40, 0x56445a0f3b90, 0x56445a0f3be0, 0x56445a0f3c30 } +Next token is token 'p' (0x56445a0f3c80 'p'Exception caught: cleaning lookahead and stack +0x56445a0f3c80->Object::~Object { 0x56445a0f3b40, 0x56445a0f3b90, 0x56445a0f3be0, 0x56445a0f3c30, 0x56445a0f3c80 } +0x56445a0f3c30->Object::~Object { 0x56445a0f3b40, 0x56445a0f3b90, 0x56445a0f3be0, 0x56445a0f3c30 } +0x56445a0f3be0->Object::~Object { 0x56445a0f3b40, 0x56445a0f3b90, 0x56445a0f3be0 } +0x56445a0f3b90->Object::~Object { 0x56445a0f3b40, 0x56445a0f3b90 } +0x56445a0f3b40->Object::~Object { 0x56445a0f3b40 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr -stderr: -stdout: stdout: exception caught: printer -./c++.at:1066: $PREPARSER ./input < in ./c++.at:1360: $PREPARSER ./input aaaae stderr: exception caught: syntax error ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaaE -./c++.at:1066: $PREPARSER ./input < in stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input aaaaT stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -./c++.at:1360: $PREPARSER ./input aaaaR stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./c++.at:1064: $PREPARSER ./input < in stderr: +./c++.at:1360: $PREPARSER ./input aaaaR +error: invalid expression +caught error error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +caught error +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -======== Testing with C++ standard flags: '' +./c++.at:1064: $PREPARSER ./input < in ./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1066: $CXX $CPPFLAGS 
$CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -stderr: -stdout: -./glr-regression.at:592: $PREPARSER ./glr-regr4 stderr: -./glr-regression.at:592: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -723. glr-regression.at:592: ok +error: invalid expression +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1064: $PREPARSER ./input < in stderr: +error: invalid character +./c++.at:1064: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +682. c++.at:1064: ok stderr: stdout: -stdout: -./c++.at:1555: $PREPARSER ./test -./c++.at:1066: ./check +./c++.at:851: $PREPARSER ./input + stderr: +./c++.at:851: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +678. c++.at:848: ok -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -726. glr-regression.at:596: testing Duplicate representation of merged trees: api.value.type=union glr.c ... -./glr-regression.at:596: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y +719. glr-regression.at:356: testing Improper handling of embedded actions and dollar(-N) in GLR parsers: glr2.cc ... +./glr-regression.at:356: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr2a.cc glr-regr2a.y stderr: stdout: stderr: stdout: -./glr-regression.at:489: $PREPARSER ./glr-regr3 input.txt -======== Testing with C++ standard flags: '' -./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:596: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS +./c++.at:1363: $PREPARSER ./input aaaas +./glr-regression.at:206: $PREPARSER ./glr-regr1 BPBPB stderr: +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./c++.at:1555: ./check -./glr-regression.at:489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y -721. glr-regression.at:489: ok +./c++.at:1363: $PREPARSER ./input aaaal +./glr-regression.at:206: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +715. glr-regression.at:206: ok +./c++.at:1363: $PREPARSER ./input i +./glr-regression.at:356: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr2a glr-regr2a.cc $LIBS +stderr: +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +720. glr-regression.at:488: testing Improper merging of GLR delayed action sets: glr.c ... 
+./glr-regression.at:488: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.c glr-regr3.y -./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +./c++.at:1363: $PREPARSER ./input aaaap +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: ./c++.at:1065: $PREPARSER ./input < in +./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: -727. glr-regression.at:597: testing Duplicate representation of merged trees: api.value.type=union glr.cc ... -./glr-regression.at:597: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y stderr: -stdout: error: invalid expression caught error error: invalid character caught error ./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:857: $PREPARSER ./input -stderr: -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -======== Testing with C++ standard flags: '' -./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -error: invalid expression -./glr-regression.at:597: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1065: $PREPARSER ./input < in -stderr: -error: invalid character -./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -683. c++.at:1065: ok - -stderr: -stdout: -./c++.at:1361: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -728. glr-regression.at:598: testing Duplicate representation of merged trees: api.value.type=union glr2.cc ... -./glr-regression.at:598: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y -./c++.at:1361: $PREPARSER ./input aaaal -stderr: -stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./glr-regression.at:207: $PREPARSER ./glr-regr1 BPBPB -stderr: -./c++.at:1361: $PREPARSER ./input i -stderr: -./glr-regression.at:207: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -716. 
glr-regression.at:207: ok -./c++.at:1361: $PREPARSER ./input aaaap -stderr: - -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:598: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS -./c++.at:1361: $PREPARSER ./input --debug aaaap -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x5596fdac0b40->Object::Object { } -Next token is token 'a' (0x5596fdac0b40 'a') -Shifting token 'a' (0x5596fdac0b40 'a') +0x7ffdb077d6ff->Object::Object { } +0x7ffdb077d7c0->Object::Object { 0x7ffdb077d6ff } +0x7ffdb077d6ff->Object::~Object { 0x7ffdb077d6ff, 0x7ffdb077d7c0 } +Next token is token 'a' (0x7ffdb077d7c0 'a') +0x7ffdb077d730->Object::Object { 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::Object { 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::~Object { 0x7ffdb077d6e7, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d7c0->Object::~Object { 0x7ffdb077d730, 0x7ffdb077d7c0 } +Shifting token 'a' (0x7ffdb077d730 'a') +0x55f386c4cee0->Object::Object { 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::Object { 0x55f386c4cee0, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d6bf, 0x7ffdb077d730 } +0x7ffdb077d730->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d730 } Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5596fdac0b40 'a') --> $$ = nterm item (0x5596fdac0b40 'a') +0x7ffdb077d7e0->Object::Object { 0x55f386c4cee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55f386c4cee0 'a') +-> $$ = nterm item (0x7ffdb077d7e0 'a') +0x55f386c4cee0->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d7e0 } +0x55f386c4cee0->Object::Object { 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::Object { 0x55f386c4cee0, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d798, 0x7ffdb077d7e0 } +0x7ffdb077d7e0->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d7e0 } Entering state 10 Stack now 0 10 Reading a token -0x5596fdac0b90->Object::Object { 0x5596fdac0b40 } -Next token is token 'a' (0x5596fdac0b90 'a') -Shifting token 'a' (0x5596fdac0b90 'a') +0x7ffdb077d6ff->Object::Object { 0x55f386c4cee0 } +0x7ffdb077d7c0->Object::Object { 0x55f386c4cee0, 0x7ffdb077d6ff } +0x7ffdb077d6ff->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d6ff, 0x7ffdb077d7c0 } +Next token is token 'a' (0x7ffdb077d7c0 'a') +0x7ffdb077d730->Object::Object { 0x55f386c4cee0, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::Object { 0x55f386c4cee0, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d6e7, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d7c0->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d730, 0x7ffdb077d7c0 } +Shifting token 'a' (0x7ffdb077d730 'a') +0x55f386c4cf00->Object::Object { 0x55f386c4cee0, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d6bf, 0x7ffdb077d730 } +0x7ffdb077d730->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d730 } Entering state 1 Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5596fdac0b90 'a') --> $$ = nterm item (0x5596fdac0b90 'a') +0x7ffdb077d7e0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55f386c4cf00 'a') +-> $$ = nterm item (0x7ffdb077d7e0 'a') +0x55f386c4cf00->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7e0 } +0x55f386c4cf00->Object::Object { 
0x55f386c4cee0, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d798, 0x7ffdb077d7e0 } +0x7ffdb077d7e0->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7e0 } Entering state 10 Stack now 0 10 10 Reading a token -0x5596fdac0be0->Object::Object { 0x5596fdac0b40, 0x5596fdac0b90 } -Next token is token 'a' (0x5596fdac0be0 'a') -Shifting token 'a' (0x5596fdac0be0 'a') +0x7ffdb077d6ff->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00 } +0x7ffdb077d7c0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d6ff } +0x7ffdb077d6ff->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d6ff, 0x7ffdb077d7c0 } +Next token is token 'a' (0x7ffdb077d7c0 'a') +0x7ffdb077d730->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d6e7, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d7c0->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d730, 0x7ffdb077d7c0 } +Shifting token 'a' (0x7ffdb077d730 'a') +0x55f386c4cf20->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d6bf, 0x7ffdb077d730 } +0x7ffdb077d730->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d730 } Entering state 1 Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5596fdac0be0 'a') --> $$ = nterm item (0x5596fdac0be0 'a') +0x7ffdb077d7e0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55f386c4cf20 'a') +-> $$ = nterm item (0x7ffdb077d7e0 'a') +0x55f386c4cf20->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7e0 } +0x55f386c4cf20->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d798, 0x7ffdb077d7e0 } +0x7ffdb077d7e0->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7e0 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x5596fdac0c30->Object::Object { 0x5596fdac0b40, 0x5596fdac0b90, 0x5596fdac0be0 } -Next token is token 'a' (0x5596fdac0c30 'a') -Shifting token 'a' (0x5596fdac0c30 'a') +0x7ffdb077d6ff->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20 } +0x7ffdb077d7c0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d6ff } +0x7ffdb077d6ff->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d6ff, 0x7ffdb077d7c0 } +Next token is token 'a' (0x7ffdb077d7c0 'a') +0x7ffdb077d730->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d6e7, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d7c0->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d730, 0x7ffdb077d7c0 } +Shifting token 'a' 
(0x7ffdb077d730 'a') +0x55f386c4cf40->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d6bf, 0x7ffdb077d730 } +0x7ffdb077d730->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d730 } Entering state 1 Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5596fdac0c30 'a') --> $$ = nterm item (0x5596fdac0c30 'a') +0x7ffdb077d7e0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55f386c4cf40 'a') +-> $$ = nterm item (0x7ffdb077d7e0 'a') +0x55f386c4cf40->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d7e0 } +0x55f386c4cf40->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d798, 0x7ffdb077d7e0 } +0x7ffdb077d7e0->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d7e0 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x5596fdac0c80->Object::Object { 0x5596fdac0b40, 0x5596fdac0b90, 0x5596fdac0be0, 0x5596fdac0c30 } -Next token is token 'p' (0x5596fdac0c80 'p'Exception caught: cleaning lookahead and stack -0x5596fdac0c80->Object::~Object { 0x5596fdac0b40, 0x5596fdac0b90, 0x5596fdac0be0, 0x5596fdac0c30, 0x5596fdac0c80 } -0x5596fdac0c30->Object::~Object { 0x5596fdac0b40, 0x5596fdac0b90, 0x5596fdac0be0, 0x5596fdac0c30 } -0x5596fdac0be0->Object::~Object { 0x5596fdac0b40, 0x5596fdac0b90, 0x5596fdac0be0 } -0x5596fdac0b90->Object::~Object { 0x5596fdac0b40, 0x5596fdac0b90 } -0x5596fdac0b40->Object::~Object { 0x5596fdac0b40 } +0x7ffdb077d6ff->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40 } +0x7ffdb077d7c0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d6ff } +0x7ffdb077d6ff->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d6ff, 0x7ffdb077d7c0 } +Next token is token 'p' (0x7ffdb077d7c0 'p'Exception caught: cleaning lookahead and stack +0x55f386c4cf40->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d7c0 } +0x55f386c4cf20->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7c0 } +0x55f386c4cf00->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7c0 } +0x55f386c4cee0->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d7c0 } +0x7ffdb077d7c0->Object::~Object { 0x7ffdb077d7c0 } exception caught: printer end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1065: $PREPARSER ./input < in +stderr: stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x5596fdac0b40->Object::Object { } -Next token is token 'a' (0x5596fdac0b40 'a') -Shifting token 'a' (0x5596fdac0b40 'a') +0x7ffdb077d6ff->Object::Object { } +0x7ffdb077d7c0->Object::Object { 0x7ffdb077d6ff } +0x7ffdb077d6ff->Object::~Object { 0x7ffdb077d6ff, 0x7ffdb077d7c0 } 
+Next token is token 'a' (0x7ffdb077d7c0 'a') +0x7ffdb077d730->Object::Object { 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::Object { 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::~Object { 0x7ffdb077d6e7, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d7c0->Object::~Object { 0x7ffdb077d730, 0x7ffdb077d7c0 } +Shifting token 'a' (0x7ffdb077d730 'a') +0x55f386c4cee0->Object::Object { 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::Object { 0x55f386c4cee0, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d6bf, 0x7ffdb077d730 } +0x7ffdb077d730->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d730 } Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5596fdac0b40 'a') --> $$ = nterm item (0x5596fdac0b40 'a') +0x7ffdb077d7e0->Object::Object { 0x55f386c4cee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55f386c4cee0 'a') +-> $$ = nterm item (0x7ffdb077d7e0 'a') +0x55f386c4cee0->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d7e0 } +0x55f386c4cee0->Object::Object { 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::Object { 0x55f386c4cee0, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d798, 0x7ffdb077d7e0 } +0x7ffdb077d7e0->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d7e0 } Entering state 10 Stack now 0 10 Reading a token -0x5596fdac0b90->Object::Object { 0x5596fdac0b40 } -Next token is token 'a' (0x5596fdac0b90 'a') -Shifting token 'a' (0x5596fdac0b90 'a') +0x7ffdb077d6ff->Object::Object { 0x55f386c4cee0 } +0x7ffdb077d7c0->Object::Object { 0x55f386c4cee0, 0x7ffdb077d6ff } +0x7ffdb077d6ff->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d6ff, 0x7ffdb077d7c0 } +Next token is token 'a' (0x7ffdb077d7c0 'a') +0x7ffdb077d730->Object::Object { 0x55f386c4cee0, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::Object { 0x55f386c4cee0, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d6e7, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d7c0->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d730, 0x7ffdb077d7c0 } +Shifting token 'a' (0x7ffdb077d730 'a') +0x55f386c4cf00->Object::Object { 0x55f386c4cee0, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d6bf, 0x7ffdb077d730 } +0x7ffdb077d730->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d730 } Entering state 1 Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5596fdac0b90 'a') --> $$ = nterm item (0x5596fdac0b90 'a') +0x7ffdb077d7e0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55f386c4cf00 'a') +-> $$ = nterm item (0x7ffdb077d7e0 'a') +0x55f386c4cf00->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7e0 } +0x55f386c4cf00->Object::Object { 0x55f386c4cee0, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d798, 0x7ffdb077d7e0 } +0x7ffdb077d7e0->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7e0 } Entering state 10 Stack now 0 10 10 Reading a token -0x5596fdac0be0->Object::Object { 0x5596fdac0b40, 0x5596fdac0b90 } -Next token is token 'a' (0x5596fdac0be0 'a') -Shifting token 'a' (0x5596fdac0be0 'a') +0x7ffdb077d6ff->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00 } +0x7ffdb077d7c0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d6ff } 
+0x7ffdb077d6ff->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d6ff, 0x7ffdb077d7c0 } +Next token is token 'a' (0x7ffdb077d7c0 'a') +0x7ffdb077d730->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d6e7, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d7c0->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d730, 0x7ffdb077d7c0 } +Shifting token 'a' (0x7ffdb077d730 'a') +0x55f386c4cf20->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d6bf, 0x7ffdb077d730 } +0x7ffdb077d730->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d730 } Entering state 1 Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5596fdac0be0 'a') --> $$ = nterm item (0x5596fdac0be0 'a') +0x7ffdb077d7e0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55f386c4cf20 'a') +-> $$ = nterm item (0x7ffdb077d7e0 'a') +0x55f386c4cf20->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7e0 } +0x55f386c4cf20->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d798, 0x7ffdb077d7e0 } +0x7ffdb077d7e0->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7e0 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x5596fdac0c30->Object::Object { 0x5596fdac0b40, 0x5596fdac0b90, 0x5596fdac0be0 } -Next token is token 'a' (0x5596fdac0c30 'a') -Shifting token 'a' (0x5596fdac0c30 'a') +0x7ffdb077d6ff->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20 } +0x7ffdb077d7c0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d6ff } +0x7ffdb077d6ff->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d6ff, 0x7ffdb077d7c0 } +Next token is token 'a' (0x7ffdb077d7c0 'a') +0x7ffdb077d730->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d6e7->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d6e7, 0x7ffdb077d730, 0x7ffdb077d7c0 } +0x7ffdb077d7c0->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d730, 0x7ffdb077d7c0 } +Shifting token 'a' (0x7ffdb077d730 'a') +0x55f386c4cf40->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d730 } +0x7ffdb077d6bf->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d6bf, 0x7ffdb077d730 } +0x7ffdb077d730->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d730 } Entering state 1 Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5596fdac0c30 'a') --> $$ = nterm item (0x5596fdac0c30 'a') 
+0x7ffdb077d7e0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55f386c4cf40 'a') +-> $$ = nterm item (0x7ffdb077d7e0 'a') +0x55f386c4cf40->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d7e0 } +0x55f386c4cf40->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d7e0 } +0x7ffdb077d798->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d798, 0x7ffdb077d7e0 } +0x7ffdb077d7e0->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d7e0 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x5596fdac0c80->Object::Object { 0x5596fdac0b40, 0x5596fdac0b90, 0x5596fdac0be0, 0x5596fdac0c30 } -Next token is token 'p' (0x5596fdac0c80 'p'Exception caught: cleaning lookahead and stack -0x5596fdac0c80->Object::~Object { 0x5596fdac0b40, 0x5596fdac0b90, 0x5596fdac0be0, 0x5596fdac0c30, 0x5596fdac0c80 } -0x5596fdac0c30->Object::~Object { 0x5596fdac0b40, 0x5596fdac0b90, 0x5596fdac0be0, 0x5596fdac0c30 } -0x5596fdac0be0->Object::~Object { 0x5596fdac0b40, 0x5596fdac0b90, 0x5596fdac0be0 } -0x5596fdac0b90->Object::~Object { 0x5596fdac0b40, 0x5596fdac0b90 } -0x5596fdac0b40->Object::~Object { 0x5596fdac0b40 } +0x7ffdb077d6ff->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40 } +0x7ffdb077d7c0->Object::Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d6ff } +0x7ffdb077d6ff->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d6ff, 0x7ffdb077d7c0 } +Next token is token 'p' (0x7ffdb077d7c0 'p'Exception caught: cleaning lookahead and stack +0x55f386c4cf40->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x55f386c4cf40, 0x7ffdb077d7c0 } +0x55f386c4cf20->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x55f386c4cf20, 0x7ffdb077d7c0 } +0x55f386c4cf00->Object::~Object { 0x55f386c4cee0, 0x55f386c4cf00, 0x7ffdb077d7c0 } +0x55f386c4cee0->Object::~Object { 0x55f386c4cee0, 0x7ffdb077d7c0 } +0x7ffdb077d7c0->Object::~Object { 0x7ffdb077d7c0 } exception caught: printer end { } -./c++.at:1361: grep '^exception caught: printer$' stderr +./c++.at:1363: grep '^exception caught: printer$' stderr +error: invalid expression +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +721. glr-regression.at:489: testing Improper merging of GLR delayed action sets: glr.cc ... +./glr-regression.at:489: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y stdout: exception caught: printer -./c++.at:1361: $PREPARSER ./input aaaae -stderr: +./c++.at:1363: $PREPARSER ./input aaaae +./c++.at:1065: $PREPARSER ./input < in stderr: exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -729. glr-regression.at:670: testing User destructor for unresolved GLR semantic value: glr.c ... 
-./glr-regression.at:670: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.c glr-regr5.y -./glr-regression.at:356: $PREPARSER ./glr-regr2a input1.txt -./c++.at:1361: $PREPARSER ./input aaaaE +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +error: invalid character +./c++.at:1065: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:488: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr3 glr-regr3.c $LIBS +./c++.at:1363: $PREPARSER ./input aaaaE stderr: +683. c++.at:1065: ok exception caught: syntax error, unexpected end of file, expecting 'a' -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:356: $PREPARSER ./glr-regr2a input2.txt -./c++.at:1361: $PREPARSER ./input aaaaT +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaT stderr: + +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:489: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS +./c++.at:1363: $PREPARSER ./input aaaaR stderr: -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:670: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr5 glr-regr5.c $LIBS -./c++.at:1361: $PREPARSER ./input aaaaR +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -./glr-regression.at:356: $PREPARSER ./glr-regr2a input3.txt -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./glr-regression.at:354: $PREPARSER ./glr-regr2a input1.txt +722. glr-regression.at:490: testing Improper merging of GLR delayed action sets: glr2.cc ... +./glr-regression.at:490: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr3.cc glr-regr3.y stderr: -======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -719. glr-regression.at:356: ok - +./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:354: $PREPARSER ./glr-regr2a input2.txt +stderr: +./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:1363: $PREPARSER ./input aaaas +./glr-regression.at:354: $PREPARSER ./glr-regr2a input3.txt +./glr-regression.at:490: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr3 glr-regr3.cc $LIBS +./c++.at:1361: $PREPARSER ./input aaaas +stderr: stderr: exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:354: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal +717. 
glr-regression.at:354: ok stderr: exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -730. glr-regression.at:671: testing User destructor for unresolved GLR semantic value: glr.cc ... -./glr-regression.at:671: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y -./c++.at:1363: $PREPARSER ./input i +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input i + stderr: exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap +stdout: +./c++.at:1555: $PREPARSER ./test stderr: -./glr-regression.at:671: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS +stderr: +stdout: +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +./c++.at:1361: $PREPARSER ./input aaaap +stderr: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1066: $PREPARSER ./input < in +======== Testing with C++ standard flags: '' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid expression +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./c++.at:1361: $PREPARSER ./input --debug aaaap +stderr: +./c++.at:1066: $PREPARSER ./input < in Starting parse Entering state 0 Stack now 0 Reading a token -0x7fff73fd35bf->Object::Object { } -0x7fff73fd36a0->Object::Object { 0x7fff73fd35bf } -0x7fff73fd35bf->Object::~Object { 0x7fff73fd35bf, 0x7fff73fd36a0 } -Next token is token 'a' (0x7fff73fd36a0 'a') -0x7fff73fd35e0->Object::Object { 0x7fff73fd36a0 } -0x7fff73fd36a0->Object::~Object { 0x7fff73fd35e0, 0x7fff73fd36a0 } -Shifting token 'a' (0x7fff73fd35e0 'a') -0x55c95be78ee0->Object::Object { 0x7fff73fd35e0 } -0x7fff73fd35e0->Object::~Object { 0x55c95be78ee0, 0x7fff73fd35e0 } +0x556b46cd0b40->Object::Object { } +Next token is token 'a' (0x556b46cd0b40 'a') +Shifting token 'a' (0x556b46cd0b40 'a') Entering state 1 Stack now 0 1 -0x7fff73fd36c0->Object::Object { 0x55c95be78ee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c95be78ee0 'a') --> $$ = nterm item (0x7fff73fd36c0 'a') -0x55c95be78ee0->Object::~Object { 0x55c95be78ee0, 0x7fff73fd36c0 } -0x55c95be78ee0->Object::Object { 0x7fff73fd36c0 } -0x7fff73fd36c0->Object::~Object { 0x55c95be78ee0, 0x7fff73fd36c0 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556b46cd0b40 'a') +-> $$ = nterm item (0x556b46cd0b40 'a') Entering state 10 Stack now 0 10 Reading a token -0x7fff73fd35bf->Object::Object { 0x55c95be78ee0 } -0x7fff73fd36a0->Object::Object { 0x55c95be78ee0, 0x7fff73fd35bf } -0x7fff73fd35bf->Object::~Object { 0x55c95be78ee0, 0x7fff73fd35bf, 0x7fff73fd36a0 } -Next token is token 'a' (0x7fff73fd36a0 'a') -0x7fff73fd35e0->Object::Object { 0x55c95be78ee0, 0x7fff73fd36a0 } -0x7fff73fd36a0->Object::~Object { 0x55c95be78ee0, 0x7fff73fd35e0, 0x7fff73fd36a0 } -Shifting token 'a' 
(0x7fff73fd35e0 'a') -0x55c95be78f00->Object::Object { 0x55c95be78ee0, 0x7fff73fd35e0 } -0x7fff73fd35e0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd35e0 } +0x556b46cd0b90->Object::Object { 0x556b46cd0b40 } +Next token is token 'a' (0x556b46cd0b90 'a') +Shifting token 'a' (0x556b46cd0b90 'a') Entering state 1 Stack now 0 10 1 -0x7fff73fd36c0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c95be78f00 'a') --> $$ = nterm item (0x7fff73fd36c0 'a') -0x55c95be78f00->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd36c0 } -0x55c95be78f00->Object::Object { 0x55c95be78ee0, 0x7fff73fd36c0 } -0x7fff73fd36c0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd36c0 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556b46cd0b90 'a') +-> $$ = nterm item (0x556b46cd0b90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x7fff73fd35bf->Object::Object { 0x55c95be78ee0, 0x55c95be78f00 } -0x7fff73fd36a0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd35bf } -0x7fff73fd35bf->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd35bf, 0x7fff73fd36a0 } -Next token is token 'a' (0x7fff73fd36a0 'a') -0x7fff73fd35e0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd36a0 } -0x7fff73fd36a0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd35e0, 0x7fff73fd36a0 } -Shifting token 'a' (0x7fff73fd35e0 'a') -0x55c95be78f20->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd35e0 } -0x7fff73fd35e0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd35e0 } +0x556b46cd0be0->Object::Object { 0x556b46cd0b40, 0x556b46cd0b90 } +Next token is token 'a' (0x556b46cd0be0 'a') +Shifting token 'a' (0x556b46cd0be0 'a') Entering state 1 Stack now 0 10 10 1 -0x7fff73fd36c0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c95be78f20 'a') --> $$ = nterm item (0x7fff73fd36c0 'a') -0x55c95be78f20->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd36c0 } -0x55c95be78f20->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd36c0 } -0x7fff73fd36c0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd36c0 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556b46cd0be0 'a') +-> $$ = nterm item (0x556b46cd0be0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x7fff73fd35bf->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20 } -0x7fff73fd36a0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd35bf } -0x7fff73fd35bf->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd35bf, 0x7fff73fd36a0 } -Next token is token 'a' (0x7fff73fd36a0 'a') -0x7fff73fd35e0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd36a0 } -0x7fff73fd36a0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd35e0, 0x7fff73fd36a0 } -Shifting token 'a' (0x7fff73fd35e0 'a') -0x55c95be78f40->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd35e0 } -0x7fff73fd35e0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd35e0 } +0x556b46cd0c30->Object::Object { 0x556b46cd0b40, 0x556b46cd0b90, 0x556b46cd0be0 } +Next token is token 'a' (0x556b46cd0c30 'a') +Shifting token 'a' (0x556b46cd0c30 'a') Entering state 1 Stack now 0 10 10 10 1 -0x7fff73fd36c0->Object::Object { 
0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c95be78f40 'a') --> $$ = nterm item (0x7fff73fd36c0 'a') -0x55c95be78f40->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd36c0 } -0x55c95be78f40->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd36c0 } -0x7fff73fd36c0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd36c0 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556b46cd0c30 'a') +-> $$ = nterm item (0x556b46cd0c30 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7fff73fd35bf->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40 } -0x7fff73fd36a0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd35bf } -0x7fff73fd35bf->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd35bf, 0x7fff73fd36a0 } -Next token is token 'p' (0x7fff73fd36a0 'p'Exception caught: cleaning lookahead and stack -0x55c95be78f40->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd36a0 } -0x55c95be78f20->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd36a0 } -0x55c95be78f00->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd36a0 } -0x55c95be78ee0->Object::~Object { 0x55c95be78ee0, 0x7fff73fd36a0 } -0x7fff73fd36a0->Object::~Object { 0x7fff73fd36a0 } +0x556b46cd0c80->Object::Object { 0x556b46cd0b40, 0x556b46cd0b90, 0x556b46cd0be0, 0x556b46cd0c30 } +Next token is token 'p' (0x556b46cd0c80 'p'Exception caught: cleaning lookahead and stack +0x556b46cd0c80->Object::~Object { 0x556b46cd0b40, 0x556b46cd0b90, 0x556b46cd0be0, 0x556b46cd0c30, 0x556b46cd0c80 } +0x556b46cd0c30->Object::~Object { 0x556b46cd0b40, 0x556b46cd0b90, 0x556b46cd0be0, 0x556b46cd0c30 } +0x556b46cd0be0->Object::~Object { 0x556b46cd0b40, 0x556b46cd0b90, 0x556b46cd0be0 } +0x556b46cd0b90->Object::~Object { 0x556b46cd0b40, 0x556b46cd0b90 } +0x556b46cd0b40->Object::~Object { 0x556b46cd0b40 } exception caught: printer end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +723. glr-regression.at:592: testing Duplicate representation of merged trees: %union { char *ptr; } glr.c ... 
Starting parse Entering state 0 Stack now 0 Reading a token -0x7fff73fd35bf->Object::Object { } -0x7fff73fd36a0->Object::Object { 0x7fff73fd35bf } -0x7fff73fd35bf->Object::~Object { 0x7fff73fd35bf, 0x7fff73fd36a0 } -Next token is token 'a' (0x7fff73fd36a0 'a') -0x7fff73fd35e0->Object::Object { 0x7fff73fd36a0 } -0x7fff73fd36a0->Object::~Object { 0x7fff73fd35e0, 0x7fff73fd36a0 } -Shifting token 'a' (0x7fff73fd35e0 'a') -0x55c95be78ee0->Object::Object { 0x7fff73fd35e0 } -0x7fff73fd35e0->Object::~Object { 0x55c95be78ee0, 0x7fff73fd35e0 } +0x556b46cd0b40->Object::Object { } +Next token is token 'a' (0x556b46cd0b40 'a') +Shifting token 'a' (0x556b46cd0b40 'a') Entering state 1 Stack now 0 1 -0x7fff73fd36c0->Object::Object { 0x55c95be78ee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c95be78ee0 'a') --> $$ = nterm item (0x7fff73fd36c0 'a') -0x55c95be78ee0->Object::~Object { 0x55c95be78ee0, 0x7fff73fd36c0 } -0x55c95be78ee0->Object::Object { 0x7fff73fd36c0 } -0x7fff73fd36c0->Object::~Object { 0x55c95be78ee0, 0x7fff73fd36c0 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556b46cd0b40 'a') +-> $$ = nterm item (0x556b46cd0b40 'a') Entering state 10 Stack now 0 10 Reading a token -0x7fff73fd35bf->Object::Object { 0x55c95be78ee0 } -0x7fff73fd36a0->Object::Object { 0x55c95be78ee0, 0x7fff73fd35bf } -0x7fff73fd35bf->Object::~Object { 0x55c95be78ee0, 0x7fff73fd35bf, 0x7fff73fd36a0 } -Next token is token 'a' (0x7fff73fd36a0 'a') -0x7fff73fd35e0->Object::Object { 0x55c95be78ee0, 0x7fff73fd36a0 } -0x7fff73fd36a0->Object::~Object { 0x55c95be78ee0, 0x7fff73fd35e0, 0x7fff73fd36a0 } -Shifting token 'a' (0x7fff73fd35e0 'a') -0x55c95be78f00->Object::Object { 0x55c95be78ee0, 0x7fff73fd35e0 } -0x7fff73fd35e0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd35e0 } +0x556b46cd0b90->Object::Object { 0x556b46cd0b40 } +Next token is token 'a' (0x556b46cd0b90 'a') +Shifting token 'a' (0x556b46cd0b90 'a') Entering state 1 Stack now 0 10 1 -0x7fff73fd36c0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c95be78f00 'a') --> $$ = nterm item (0x7fff73fd36c0 'a') -0x55c95be78f00->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd36c0 } -0x55c95be78f00->Object::Object { 0x55c95be78ee0, 0x7fff73fd36c0 } -0x7fff73fd36c0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd36c0 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556b46cd0b90 'a') +-> $$ = nterm item (0x556b46cd0b90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x7fff73fd35bf->Object::Object { 0x55c95be78ee0, 0x55c95be78f00 } -0x7fff73fd36a0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd35bf } -0x7fff73fd35bf->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd35bf, 0x7fff73fd36a0 } -Next token is token 'a' (0x7fff73fd36a0 'a') -0x7fff73fd35e0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd36a0 } -0x7fff73fd36a0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd35e0, 0x7fff73fd36a0 } -Shifting token 'a' (0x7fff73fd35e0 'a') -0x55c95be78f20->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd35e0 } -0x7fff73fd35e0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd35e0 } +0x556b46cd0be0->Object::Object { 0x556b46cd0b40, 0x556b46cd0b90 } +Next token is token 'a' (0x556b46cd0be0 'a') +Shifting token 'a' (0x556b46cd0be0 'a') Entering state 1 Stack now 0 10 10 1 -0x7fff73fd36c0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20 } -Reducing 
stack by rule 4 (line 142): - $1 = token 'a' (0x55c95be78f20 'a') --> $$ = nterm item (0x7fff73fd36c0 'a') -0x55c95be78f20->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd36c0 } -0x55c95be78f20->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd36c0 } -0x7fff73fd36c0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd36c0 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556b46cd0be0 'a') +-> $$ = nterm item (0x556b46cd0be0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x7fff73fd35bf->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20 } -0x7fff73fd36a0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd35bf } -0x7fff73fd35bf->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd35bf, 0x7fff73fd36a0 } -Next token is token 'a' (0x7fff73fd36a0 'a') -0x7fff73fd35e0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd36a0 } -0x7fff73fd36a0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd35e0, 0x7fff73fd36a0 } -Shifting token 'a' (0x7fff73fd35e0 'a') -0x55c95be78f40->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd35e0 } -0x7fff73fd35e0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd35e0 } +0x556b46cd0c30->Object::Object { 0x556b46cd0b40, 0x556b46cd0b90, 0x556b46cd0be0 } +Next token is token 'a' (0x556b46cd0c30 'a') +Shifting token 'a' (0x556b46cd0c30 'a') Entering state 1 Stack now 0 10 10 10 1 -0x7fff73fd36c0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55c95be78f40 'a') --> $$ = nterm item (0x7fff73fd36c0 'a') -0x55c95be78f40->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd36c0 } -0x55c95be78f40->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd36c0 } -0x7fff73fd36c0->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd36c0 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556b46cd0c30 'a') +-> $$ = nterm item (0x556b46cd0c30 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7fff73fd35bf->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40 } -0x7fff73fd36a0->Object::Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd35bf } -0x7fff73fd35bf->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd35bf, 0x7fff73fd36a0 } -Next token is token 'p' (0x7fff73fd36a0 'p'Exception caught: cleaning lookahead and stack -0x55c95be78f40->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x55c95be78f40, 0x7fff73fd36a0 } -0x55c95be78f20->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x55c95be78f20, 0x7fff73fd36a0 } -0x55c95be78f00->Object::~Object { 0x55c95be78ee0, 0x55c95be78f00, 0x7fff73fd36a0 } -0x55c95be78ee0->Object::~Object { 0x55c95be78ee0, 0x7fff73fd36a0 } -0x7fff73fd36a0->Object::~Object { 0x7fff73fd36a0 } +0x556b46cd0c80->Object::Object { 0x556b46cd0b40, 0x556b46cd0b90, 0x556b46cd0be0, 0x556b46cd0c30 } +Next token is token 'p' (0x556b46cd0c80 'p'Exception caught: cleaning lookahead and stack +0x556b46cd0c80->Object::~Object { 0x556b46cd0b40, 0x556b46cd0b90, 0x556b46cd0be0, 0x556b46cd0c30, 0x556b46cd0c80 } +0x556b46cd0c30->Object::~Object { 0x556b46cd0b40, 0x556b46cd0b90, 0x556b46cd0be0, 
0x556b46cd0c30 } +0x556b46cd0be0->Object::~Object { 0x556b46cd0b40, 0x556b46cd0b90, 0x556b46cd0be0 } +0x556b46cd0b90->Object::~Object { 0x556b46cd0b40, 0x556b46cd0b90 } +0x556b46cd0b40->Object::~Object { 0x556b46cd0b40 } exception caught: printer end { } -./c++.at:1363: grep '^exception caught: printer$' stderr +./c++.at:1361: grep '^exception caught: printer$' stderr +./glr-regression.at:592: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y stdout: +======== Testing with C++ standard flags: '' exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae +./c++.at:1361: $PREPARSER ./input aaaae +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS stderr: exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:592: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS +./c++.at:1361: $PREPARSER ./input aaaaE stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:596: $PREPARSER ./glr-regr4 stderr: -./c++.at:1363: $PREPARSER ./input aaaaT +./c++.at:1555: ./check +stdout: +./c++.at:1361: $PREPARSER ./input aaaaT +./c++.at:1555: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -fcaret -o test.cc test.y +./c++.at:857: $PREPARSER ./input stderr: -./glr-regression.at:596: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -726. glr-regression.at:596: ok -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS - +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -731. glr-regression.at:672: testing User destructor for unresolved GLR semantic value: glr2.cc ... -./glr-regression.at:593: $PREPARSER ./glr-regr4 -./glr-regression.at:672: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y +./c++.at:1066: ./check +./c++.at:1361: $PREPARSER ./input aaaaR stderr: -./glr-regression.at:593: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -724. 
glr-regression.at:593: ok +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +stdout: +======== Testing with C++ standard flags: '' +./c++.at:1411: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +======== Testing with C++ standard flags: '' +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1555: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o test test.cc $LIBS +stderr: +stdout: +./glr-regression.at:355: $PREPARSER ./glr-regr2a input1.txt +stderr: +./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:355: $PREPARSER ./glr-regr2a input2.txt +stderr: +./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:355: $PREPARSER ./glr-regr2a input3.txt +stderr: +./glr-regression.at:355: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +718. glr-regression.at:355: ok -./glr-regression.at:672: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS +724. glr-regression.at:593: testing Duplicate representation of merged trees: %union { char *ptr; } glr.cc ... +./glr-regression.at:593: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y +./glr-regression.at:593: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS stderr: stdout: ./c++.at:1362: $PREPARSER ./input aaaas -732. glr-regression.at:738: testing User destructor after an error during a split parse: glr.c ... -./glr-regression.at:738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.c glr-regr6.y stderr: exception caught: reduction ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -267151,1640 +266831,1852 @@ ./c++.at:1362: $PREPARSER ./input aaaap stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:738: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr6 glr-regr6.c $LIBS ./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: -stderr: -stdout: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffc2d291a07->Object::Object { } -0x7ffc2d291aa0->Object::Object { 0x7ffc2d291a07 } -0x7ffc2d291a07->Object::~Object { 0x7ffc2d291a07, 0x7ffc2d291aa0 } -Next token is token 'a' (0x7ffc2d291aa0 'a') -0x7ffc2d2919e0->Object::Object { 0x7ffc2d291aa0 } -0x7ffc2d291aa0->Object::~Object { 0x7ffc2d2919e0, 0x7ffc2d291aa0 } -Shifting token 'a' (0x7ffc2d2919e0 'a') -0x557a9c7baee0->Object::Object { 0x7ffc2d2919e0 } -0x7ffc2d2919e0->Object::~Object { 0x557a9c7baee0, 0x7ffc2d2919e0 } +0x7ffc24aa2a67->Object::Object { } +0x7ffc24aa2b00->Object::Object { 0x7ffc24aa2a67 } +0x7ffc24aa2a67->Object::~Object { 0x7ffc24aa2a67, 0x7ffc24aa2b00 } +Next token is token 'a' (0x7ffc24aa2b00 'a') +0x7ffc24aa2a40->Object::Object { 0x7ffc24aa2b00 } +0x7ffc24aa2b00->Object::~Object { 0x7ffc24aa2a40, 0x7ffc24aa2b00 } +Shifting token 'a' (0x7ffc24aa2a40 'a') +0x55df9d6eaee0->Object::Object { 0x7ffc24aa2a40 } +0x7ffc24aa2a40->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2a40 } Entering state 2 Stack now 0 2 -0x7ffc2d291ac0->Object::Object { 0x557a9c7baee0 } +0x7ffc24aa2b20->Object::Object { 0x55df9d6eaee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x557a9c7baee0 'a') --> $$ = 
nterm item (0x7ffc2d291ac0 'a') -0x557a9c7baee0->Object::~Object { 0x557a9c7baee0, 0x7ffc2d291ac0 } -0x557a9c7baee0->Object::Object { 0x7ffc2d291ac0 } -0x7ffc2d291ac0->Object::~Object { 0x557a9c7baee0, 0x7ffc2d291ac0 } + $1 = token 'a' (0x55df9d6eaee0 'a') +-> $$ = nterm item (0x7ffc24aa2b20 'a') +0x55df9d6eaee0->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2b20 } +0x55df9d6eaee0->Object::Object { 0x7ffc24aa2b20 } +0x7ffc24aa2b20->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2b20 } Entering state 11 Stack now 0 11 Reading a token -0x7ffc2d291a07->Object::Object { 0x557a9c7baee0 } -0x7ffc2d291aa0->Object::Object { 0x557a9c7baee0, 0x7ffc2d291a07 } -0x7ffc2d291a07->Object::~Object { 0x557a9c7baee0, 0x7ffc2d291a07, 0x7ffc2d291aa0 } -Next token is token 'a' (0x7ffc2d291aa0 'a') -0x7ffc2d2919e0->Object::Object { 0x557a9c7baee0, 0x7ffc2d291aa0 } -0x7ffc2d291aa0->Object::~Object { 0x557a9c7baee0, 0x7ffc2d2919e0, 0x7ffc2d291aa0 } -Shifting token 'a' (0x7ffc2d2919e0 'a') -0x557a9c7baf00->Object::Object { 0x557a9c7baee0, 0x7ffc2d2919e0 } -0x7ffc2d2919e0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d2919e0 } +0x7ffc24aa2a67->Object::Object { 0x55df9d6eaee0 } +0x7ffc24aa2b00->Object::Object { 0x55df9d6eaee0, 0x7ffc24aa2a67 } +0x7ffc24aa2a67->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2a67, 0x7ffc24aa2b00 } +Next token is token 'a' (0x7ffc24aa2b00 'a') +0x7ffc24aa2a40->Object::Object { 0x55df9d6eaee0, 0x7ffc24aa2b00 } +0x7ffc24aa2b00->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2a40, 0x7ffc24aa2b00 } +Shifting token 'a' (0x7ffc24aa2a40 'a') +0x55df9d6eaf00->Object::Object { 0x55df9d6eaee0, 0x7ffc24aa2a40 } +0x7ffc24aa2a40->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2a40 } Entering state 2 Stack now 0 11 2 -0x7ffc2d291ac0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00 } +0x7ffc24aa2b20->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x557a9c7baf00 'a') --> $$ = nterm item (0x7ffc2d291ac0 'a') -0x557a9c7baf00->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291ac0 } -0x557a9c7baf00->Object::Object { 0x557a9c7baee0, 0x7ffc2d291ac0 } -0x7ffc2d291ac0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291ac0 } + $1 = token 'a' (0x55df9d6eaf00 'a') +-> $$ = nterm item (0x7ffc24aa2b20 'a') +0x55df9d6eaf00->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2b20 } +0x55df9d6eaf00->Object::Object { 0x55df9d6eaee0, 0x7ffc24aa2b20 } +0x7ffc24aa2b20->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2b20 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffc2d291a07->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00 } -0x7ffc2d291aa0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291a07 } -0x7ffc2d291a07->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291a07, 0x7ffc2d291aa0 } -Next token is token 'a' (0x7ffc2d291aa0 'a') -0x7ffc2d2919e0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291aa0 } -0x7ffc2d291aa0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d2919e0, 0x7ffc2d291aa0 } -Shifting token 'a' (0x7ffc2d2919e0 'a') -0x557a9c7baf20->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d2919e0 } -0x7ffc2d2919e0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d2919e0 } +0x7ffc24aa2a67->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00 } +0x7ffc24aa2b00->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2a67 } +0x7ffc24aa2a67->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2a67, 0x7ffc24aa2b00 } 
+Next token is token 'a' (0x7ffc24aa2b00 'a') +0x7ffc24aa2a40->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2b00 } +0x7ffc24aa2b00->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2a40, 0x7ffc24aa2b00 } +Shifting token 'a' (0x7ffc24aa2a40 'a') +0x55df9d6eaf20->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2a40 } +0x7ffc24aa2a40->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2a40 } Entering state 2 Stack now 0 11 11 2 -0x7ffc2d291ac0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20 } +0x7ffc24aa2b20->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x557a9c7baf20 'a') --> $$ = nterm item (0x7ffc2d291ac0 'a') -0x557a9c7baf20->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291ac0 } -0x557a9c7baf20->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291ac0 } -0x7ffc2d291ac0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291ac0 } + $1 = token 'a' (0x55df9d6eaf20 'a') +-> $$ = nterm item (0x7ffc24aa2b20 'a') +0x55df9d6eaf20->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2b20 } +0x55df9d6eaf20->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2b20 } +0x7ffc24aa2b20->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2b20 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffc2d291a07->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20 } -0x7ffc2d291aa0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291a07 } -0x7ffc2d291a07->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291a07, 0x7ffc2d291aa0 } -Next token is token 'a' (0x7ffc2d291aa0 'a') -0x7ffc2d2919e0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291aa0 } -0x7ffc2d291aa0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d2919e0, 0x7ffc2d291aa0 } -Shifting token 'a' (0x7ffc2d2919e0 'a') -0x557a9c7baf40->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d2919e0 } -0x7ffc2d2919e0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 0x7ffc2d2919e0 } +0x7ffc24aa2a67->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20 } +0x7ffc24aa2b00->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2a67 } +0x7ffc24aa2a67->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2a67, 0x7ffc24aa2b00 } +Next token is token 'a' (0x7ffc24aa2b00 'a') +0x7ffc24aa2a40->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2b00 } +0x7ffc24aa2b00->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2a40, 0x7ffc24aa2b00 } +Shifting token 'a' (0x7ffc24aa2a40 'a') +0x55df9d6eaf40->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2a40 } +0x7ffc24aa2a40->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2a40 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffc2d291ac0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40 } +0x7ffc24aa2b20->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x557a9c7baf40 'a') --> $$ = nterm item (0x7ffc2d291ac0 'a') -0x557a9c7baf40->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 
0x7ffc2d291ac0 } -0x557a9c7baf40->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291ac0 } -0x7ffc2d291ac0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 0x7ffc2d291ac0 } + $1 = token 'a' (0x55df9d6eaf40 'a') +-> $$ = nterm item (0x7ffc24aa2b20 'a') +0x55df9d6eaf40->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2b20 } +0x55df9d6eaf40->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2b20 } +0x7ffc24aa2b20->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2b20 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffc2d291a07->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40 } -0x7ffc2d291aa0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 0x7ffc2d291a07 } -0x7ffc2d291a07->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 0x7ffc2d291a07, 0x7ffc2d291aa0 } -Next token is token 'p' (0x7ffc2d291aa0 'p'Exception caught: cleaning lookahead and stack -0x557a9c7baf40->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 0x7ffc2d291aa0 } -0x557a9c7baf20->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291aa0 } -0x557a9c7baf00->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291aa0 } -0x557a9c7baee0->Object::~Object { 0x557a9c7baee0, 0x7ffc2d291aa0 } -0x7ffc2d291aa0->Object::~Object { 0x7ffc2d291aa0 } +0x7ffc24aa2a67->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40 } +0x7ffc24aa2b00->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2a67 } +0x7ffc24aa2a67->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2a67, 0x7ffc24aa2b00 } +Next token is token 'p' (0x7ffc24aa2b00 'p'Exception caught: cleaning lookahead and stack +0x55df9d6eaf40->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2b00 } +0x55df9d6eaf20->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2b00 } +0x55df9d6eaf00->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2b00 } +0x55df9d6eaee0->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2b00 } +0x7ffc24aa2b00->Object::~Object { 0x7ffc24aa2b00 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:670: $PREPARSER ./glr-regr5 -stderr: -Ambiguity detected. 
-Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:670: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffc2d291a07->Object::Object { } -0x7ffc2d291aa0->Object::Object { 0x7ffc2d291a07 } -0x7ffc2d291a07->Object::~Object { 0x7ffc2d291a07, 0x7ffc2d291aa0 } -Next token is token 'a' (0x7ffc2d291aa0 'a') -0x7ffc2d2919e0->Object::Object { 0x7ffc2d291aa0 } -0x7ffc2d291aa0->Object::~Object { 0x7ffc2d2919e0, 0x7ffc2d291aa0 } -Shifting token 'a' (0x7ffc2d2919e0 'a') -0x557a9c7baee0->Object::Object { 0x7ffc2d2919e0 } -0x7ffc2d2919e0->Object::~Object { 0x557a9c7baee0, 0x7ffc2d2919e0 } +0x7ffc24aa2a67->Object::Object { } +0x7ffc24aa2b00->Object::Object { 0x7ffc24aa2a67 } +0x7ffc24aa2a67->Object::~Object { 0x7ffc24aa2a67, 0x7ffc24aa2b00 } +Next token is token 'a' (0x7ffc24aa2b00 'a') +0x7ffc24aa2a40->Object::Object { 0x7ffc24aa2b00 } +0x7ffc24aa2b00->Object::~Object { 0x7ffc24aa2a40, 0x7ffc24aa2b00 } +Shifting token 'a' (0x7ffc24aa2a40 'a') +0x55df9d6eaee0->Object::Object { 0x7ffc24aa2a40 } +0x7ffc24aa2a40->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2a40 } Entering state 2 Stack now 0 2 -0x7ffc2d291ac0->Object::Object { 0x557a9c7baee0 } +0x7ffc24aa2b20->Object::Object { 0x55df9d6eaee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x557a9c7baee0 'a') --> $$ = nterm item (0x7ffc2d291ac0 'a') -0x557a9c7baee0->Object::~Object { 0x557a9c7baee0, 0x7ffc2d291ac0 } -0x557a9c7baee0->Object::Object { 0x7ffc2d291ac0 } -0x7ffc2d291ac0->Object::~Object { 0x557a9c7baee0, 0x7ffc2d291ac0 } + $1 = token 'a' (0x55df9d6eaee0 'a') +-> $$ = nterm item (0x7ffc24aa2b20 'a') +0x55df9d6eaee0->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2b20 } +0x55df9d6eaee0->Object::Object { 0x7ffc24aa2b20 } +0x7ffc24aa2b20->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2b20 } Entering state 11 Stack now 0 11 Reading a token -0x7ffc2d291a07->Object::Object { 0x557a9c7baee0 } -0x7ffc2d291aa0->Object::Object { 0x557a9c7baee0, 0x7ffc2d291a07 } -0x7ffc2d291a07->Object::~Object { 0x557a9c7baee0, 0x7ffc2d291a07, 0x7ffc2d291aa0 } -Next token is token 'a' (0x7ffc2d291aa0 'a') -0x7ffc2d2919e0->Object::Object { 0x557a9c7baee0, 0x7ffc2d291aa0 } -0x7ffc2d291aa0->Object::~Object { 0x557a9c7baee0, 0x7ffc2d2919e0, 0x7ffc2d291aa0 } -Shifting token 'a' (0x7ffc2d2919e0 'a') -0x557a9c7baf00->Object::Object { 0x557a9c7baee0, 0x7ffc2d2919e0 } -0x7ffc2d2919e0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d2919e0 } +0x7ffc24aa2a67->Object::Object { 0x55df9d6eaee0 } +0x7ffc24aa2b00->Object::Object { 0x55df9d6eaee0, 0x7ffc24aa2a67 } +0x7ffc24aa2a67->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2a67, 0x7ffc24aa2b00 } +Next token is token 'a' (0x7ffc24aa2b00 'a') +0x7ffc24aa2a40->Object::Object { 0x55df9d6eaee0, 0x7ffc24aa2b00 } +0x7ffc24aa2b00->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2a40, 0x7ffc24aa2b00 } +Shifting token 'a' (0x7ffc24aa2a40 'a') +0x55df9d6eaf00->Object::Object { 0x55df9d6eaee0, 0x7ffc24aa2a40 } +0x7ffc24aa2a40->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2a40 } Entering state 2 Stack now 0 11 2 -0x7ffc2d291ac0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00 } +0x7ffc24aa2b20->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x557a9c7baf00 'a') --> $$ = nterm item (0x7ffc2d291ac0 'a') -0x557a9c7baf00->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291ac0 } -0x557a9c7baf00->Object::Object { 0x557a9c7baee0, 
0x7ffc2d291ac0 } -0x7ffc2d291ac0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291ac0 } + $1 = token 'a' (0x55df9d6eaf00 'a') +-> $$ = nterm item (0x7ffc24aa2b20 'a') +0x55df9d6eaf00->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2b20 } +0x55df9d6eaf00->Object::Object { 0x55df9d6eaee0, 0x7ffc24aa2b20 } +0x7ffc24aa2b20->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2b20 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffc2d291a07->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00 } -0x7ffc2d291aa0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291a07 } -0x7ffc2d291a07->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291a07, 0x7ffc2d291aa0 } -Next token is token 'a' (0x7ffc2d291aa0 'a') -0x7ffc2d2919e0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291aa0 } -0x7ffc2d291aa0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d2919e0, 0x7ffc2d291aa0 } -Shifting token 'a' (0x7ffc2d2919e0 'a') -0x557a9c7baf20->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d2919e0 } -0x7ffc2d2919e0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d2919e0 } +0x7ffc24aa2a67->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00 } +0x7ffc24aa2b00->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2a67 } +0x7ffc24aa2a67->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2a67, 0x7ffc24aa2b00 } +Next token is token 'a' (0x7ffc24aa2b00 'a') +0x7ffc24aa2a40->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2b00 } +0x7ffc24aa2b00->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2a40, 0x7ffc24aa2b00 } +Shifting token 'a' (0x7ffc24aa2a40 'a') +0x55df9d6eaf20->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2a40 } +0x7ffc24aa2a40->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2a40 } Entering state 2 Stack now 0 11 11 2 -0x7ffc2d291ac0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20 } +0x7ffc24aa2b20->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x557a9c7baf20 'a') --> $$ = nterm item (0x7ffc2d291ac0 'a') -0x557a9c7baf20->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291ac0 } -0x557a9c7baf20->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291ac0 } -0x7ffc2d291ac0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291ac0 } + $1 = token 'a' (0x55df9d6eaf20 'a') +-> $$ = nterm item (0x7ffc24aa2b20 'a') +0x55df9d6eaf20->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2b20 } +0x55df9d6eaf20->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2b20 } +0x7ffc24aa2b20->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2b20 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffc2d291a07->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20 } -0x7ffc2d291aa0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291a07 } -0x7ffc2d291a07->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291a07, 0x7ffc2d291aa0 } -Next token is token 'a' (0x7ffc2d291aa0 'a') -0x7ffc2d2919e0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291aa0 } -0x7ffc2d291aa0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d2919e0, 0x7ffc2d291aa0 } -Shifting token 'a' (0x7ffc2d2919e0 'a') -0x557a9c7baf40->Object::Object { 0x557a9c7baee0, 
0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d2919e0 } -0x7ffc2d2919e0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 0x7ffc2d2919e0 } +0x7ffc24aa2a67->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20 } +0x7ffc24aa2b00->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2a67 } +0x7ffc24aa2a67->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2a67, 0x7ffc24aa2b00 } +Next token is token 'a' (0x7ffc24aa2b00 'a') +0x7ffc24aa2a40->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2b00 } +0x7ffc24aa2b00->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2a40, 0x7ffc24aa2b00 } +Shifting token 'a' (0x7ffc24aa2a40 'a') +0x55df9d6eaf40->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2a40 } +0x7ffc24aa2a40->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2a40 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffc2d291ac0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40 } +0x7ffc24aa2b20->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x557a9c7baf40 'a') --> $$ = nterm item (0x7ffc2d291ac0 'a') -0x557a9c7baf40->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 0x7ffc2d291ac0 } -0x557a9c7baf40->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291ac0 } -0x7ffc2d291ac0->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 0x7ffc2d291ac0 } + $1 = token 'a' (0x55df9d6eaf40 'a') +-> $$ = nterm item (0x7ffc24aa2b20 'a') +0x55df9d6eaf40->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2b20 } +0x55df9d6eaf40->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2b20 } +0x7ffc24aa2b20->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2b20 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffc2d291a07->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40 } -0x7ffc2d291aa0->Object::Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 0x7ffc2d291a07 } -0x7ffc2d291a07->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 0x7ffc2d291a07, 0x7ffc2d291aa0 } -Next token is token 'p' (0x7ffc2d291aa0 'p'Exception caught: cleaning lookahead and stack -0x557a9c7baf40->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x557a9c7baf40, 0x7ffc2d291aa0 } -0x557a9c7baf20->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x557a9c7baf20, 0x7ffc2d291aa0 } -0x557a9c7baf00->Object::~Object { 0x557a9c7baee0, 0x557a9c7baf00, 0x7ffc2d291aa0 } -0x557a9c7baee0->Object::~Object { 0x557a9c7baee0, 0x7ffc2d291aa0 } -0x7ffc2d291aa0->Object::~Object { 0x7ffc2d291aa0 } +0x7ffc24aa2a67->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40 } +0x7ffc24aa2b00->Object::Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2a67 } +0x7ffc24aa2a67->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2a67, 0x7ffc24aa2b00 } +Next token is token 'p' (0x7ffc24aa2b00 'p'Exception caught: cleaning lookahead and stack +0x55df9d6eaf40->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x55df9d6eaf40, 0x7ffc24aa2b00 } 
+0x55df9d6eaf20->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x55df9d6eaf20, 0x7ffc24aa2b00 } +0x55df9d6eaf00->Object::~Object { 0x55df9d6eaee0, 0x55df9d6eaf00, 0x7ffc24aa2b00 } +0x55df9d6eaee0->Object::~Object { 0x55df9d6eaee0, 0x7ffc24aa2b00 } +0x7ffc24aa2b00->Object::~Object { 0x7ffc24aa2b00 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr stdout: exception caught: printer ./c++.at:1362: $PREPARSER ./input aaaae -729. glr-regression.at:670: ok stderr: exception caught: syntax error ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -./c++.at:857: $PREPARSER ./input ./c++.at:1362: $PREPARSER ./input aaaaE stderr: - +stdout: stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:488: $PREPARSER ./glr-regr3 input.txt +stderr: ./c++.at:1362: $PREPARSER ./input aaaaT -./c++.at:858: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: +./glr-regression.at:488: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +720. glr-regression.at:488: ok ./c++.at:1362: $PREPARSER ./input aaaaR stderr: +stderr: +stdout: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:592: $PREPARSER ./glr-regr4 +stderr: +stdout: +./c++.at:857: $PREPARSER ./input +stderr: ======== Testing with C++ standard flags: '' stderr: ./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stdout: -689. c++.at:1371: ok +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./glr-regression.at:592: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -733. glr-regression.at:739: testing User destructor after an error during a split parse: glr.cc ... -./glr-regression.at:739: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +723. glr-regression.at:592: ok -./glr-regression.at:739: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS -734. glr-regression.at:740: testing User destructor after an error during a split parse: glr2.cc ... -./glr-regression.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y +725. glr-regression.at:594: testing Duplicate representation of merged trees: %union { char *ptr; } glr2.cc ... +./glr-regression.at:594: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y +726. glr-regression.at:596: testing Duplicate representation of merged trees: api.value.type=union glr.c ... 
+./glr-regression.at:596: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.c glr-regr4.y +./glr-regression.at:594: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS +./glr-regression.at:596: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr4 glr-regr4.c $LIBS stderr: stdout: -./c++.at:1555: $PREPARSER ./test -./glr-regression.at:740: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS +./glr-regression.at:489: $PREPARSER ./glr-regr3 input.txt stderr: -./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -691. c++.at:1517: ok +./glr-regression.at:489: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +721. glr-regression.at:489: ok + +727. glr-regression.at:597: testing Duplicate representation of merged trees: api.value.type=union glr.cc ... +./glr-regression.at:597: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y +./glr-regression.at:597: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS +stderr: +stdout: +./glr-regression.at:593: $PREPARSER ./glr-regr4 +stderr: +./glr-regression.at:593: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +724. glr-regression.at:593: ok -735. glr-regression.at:843: testing Duplicated user destructor for lookahead: glr.c ... -./glr-regression.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.c glr-regr7.y stderr: -./glr-regression.at:843: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr7 glr-regr7.c $LIBS stdout: ./c++.at:1360: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +689. c++.at:1371: ok ./c++.at:1360: $PREPARSER ./input aaaal stderr: exception caught: yylex ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: +728. glr-regression.at:598: testing Duplicate representation of merged trees: api.value.type=union glr2.cc ... +./glr-regression.at:598: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr4.cc glr-regr4.y + ./c++.at:1360: $PREPARSER ./input i -./glr-regression.at:594: $PREPARSER ./glr-regr4 -stderr: stderr: exception caught: initial-action ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:594: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -725. 
glr-regression.at:594: ok ./c++.at:1360: $PREPARSER ./input aaaap stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./c++.at:1360: $PREPARSER ./input --debug aaaap - +./glr-regression.at:598: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr4 glr-regr4.cc $LIBS stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x55cbd38f9b40->Object::Object { } -Next token is token 'a' (0x55cbd38f9b40 'a') -Shifting token 'a' (0x55cbd38f9b40 'a') +0x5634721c6b40->Object::Object { } +Next token is token 'a' (0x5634721c6b40 'a') +Shifting token 'a' (0x5634721c6b40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55cbd38f9b40 'a') --> $$ = nterm item (0x55cbd38f9b40 'a') + $1 = token 'a' (0x5634721c6b40 'a') +-> $$ = nterm item (0x5634721c6b40 'a') Entering state 11 Stack now 0 11 Reading a token -0x55cbd38f9b90->Object::Object { 0x55cbd38f9b40 } -Next token is token 'a' (0x55cbd38f9b90 'a') -Shifting token 'a' (0x55cbd38f9b90 'a') +0x5634721c6b90->Object::Object { 0x5634721c6b40 } +Next token is token 'a' (0x5634721c6b90 'a') +Shifting token 'a' (0x5634721c6b90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55cbd38f9b90 'a') --> $$ = nterm item (0x55cbd38f9b90 'a') + $1 = token 'a' (0x5634721c6b90 'a') +-> $$ = nterm item (0x5634721c6b90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x55cbd38f9be0->Object::Object { 0x55cbd38f9b40, 0x55cbd38f9b90 } -Next token is token 'a' (0x55cbd38f9be0 'a') -Shifting token 'a' (0x55cbd38f9be0 'a') +0x5634721c6be0->Object::Object { 0x5634721c6b40, 0x5634721c6b90 } +Next token is token 'a' (0x5634721c6be0 'a') +Shifting token 'a' (0x5634721c6be0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55cbd38f9be0 'a') --> $$ = nterm item (0x55cbd38f9be0 'a') + $1 = token 'a' (0x5634721c6be0 'a') +-> $$ = nterm item (0x5634721c6be0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x55cbd38f9c30->Object::Object { 0x55cbd38f9b40, 0x55cbd38f9b90, 0x55cbd38f9be0 } -Next token is token 'a' (0x55cbd38f9c30 'a') -Shifting token 'a' (0x55cbd38f9c30 'a') +0x5634721c6c30->Object::Object { 0x5634721c6b40, 0x5634721c6b90, 0x5634721c6be0 } +Next token is token 'a' (0x5634721c6c30 'a') +Shifting token 'a' (0x5634721c6c30 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55cbd38f9c30 'a') --> $$ = nterm item (0x55cbd38f9c30 'a') + $1 = token 'a' (0x5634721c6c30 'a') +-> $$ = nterm item (0x5634721c6c30 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x55cbd38f9c80->Object::Object { 0x55cbd38f9b40, 0x55cbd38f9b90, 0x55cbd38f9be0, 0x55cbd38f9c30 } -Next token is token 'p' (0x55cbd38f9c80 'p'Exception caught: cleaning lookahead and stack -0x55cbd38f9c80->Object::~Object { 0x55cbd38f9b40, 0x55cbd38f9b90, 0x55cbd38f9be0, 0x55cbd38f9c30, 0x55cbd38f9c80 } -0x55cbd38f9c30->Object::~Object { 0x55cbd38f9b40, 0x55cbd38f9b90, 0x55cbd38f9be0, 0x55cbd38f9c30 } -0x55cbd38f9be0->Object::~Object { 0x55cbd38f9b40, 0x55cbd38f9b90, 0x55cbd38f9be0 } -0x55cbd38f9b90->Object::~Object { 0x55cbd38f9b40, 0x55cbd38f9b90 } -0x55cbd38f9b40->Object::~Object { 0x55cbd38f9b40 } +0x5634721c6c80->Object::Object { 0x5634721c6b40, 0x5634721c6b90, 0x5634721c6be0, 0x5634721c6c30 } +Next token is token 'p' (0x5634721c6c80 'p'Exception caught: cleaning lookahead and stack +0x5634721c6c80->Object::~Object { 0x5634721c6b40, 
0x5634721c6b90, 0x5634721c6be0, 0x5634721c6c30, 0x5634721c6c80 } +0x5634721c6c30->Object::~Object { 0x5634721c6b40, 0x5634721c6b90, 0x5634721c6be0, 0x5634721c6c30 } +0x5634721c6be0->Object::~Object { 0x5634721c6b40, 0x5634721c6b90, 0x5634721c6be0 } +0x5634721c6b90->Object::~Object { 0x5634721c6b40, 0x5634721c6b90 } +0x5634721c6b40->Object::~Object { 0x5634721c6b40 } exception caught: printer end { } +stdout: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./glr-regression.at:596: $PREPARSER ./glr-regr4 Starting parse Entering state 0 Stack now 0 Reading a token -0x55cbd38f9b40->Object::Object { } -Next token is token 'a' (0x55cbd38f9b40 'a') -Shifting token 'a' (0x55cbd38f9b40 'a') +0x5634721c6b40->Object::Object { } +Next token is token 'a' (0x5634721c6b40 'a') +Shifting token 'a' (0x5634721c6b40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55cbd38f9b40 'a') --> $$ = nterm item (0x55cbd38f9b40 'a') + $1 = token 'a' (0x5634721c6b40 'a') +-> $$ = nterm item (0x5634721c6b40 'a') Entering state 11 Stack now 0 11 Reading a token -0x55cbd38f9b90->Object::Object { 0x55cbd38f9b40 } -Next token is token 'a' (0x55cbd38f9b90 'a') -Shifting token 'a' (0x55cbd38f9b90 'a') +0x5634721c6b90->Object::Object { 0x5634721c6b40 } +Next token is token 'a' (0x5634721c6b90 'a') +Shifting token 'a' (0x5634721c6b90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55cbd38f9b90 'a') --> $$ = nterm item (0x55cbd38f9b90 'a') + $1 = token 'a' (0x5634721c6b90 'a') +-> $$ = nterm item (0x5634721c6b90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x55cbd38f9be0->Object::Object { 0x55cbd38f9b40, 0x55cbd38f9b90 } -Next token is token 'a' (0x55cbd38f9be0 'a') -Shifting token 'a' (0x55cbd38f9be0 'a') +0x5634721c6be0->Object::Object { 0x5634721c6b40, 0x5634721c6b90 } +Next token is token 'a' (0x5634721c6be0 'a') +Shifting token 'a' (0x5634721c6be0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55cbd38f9be0 'a') --> $$ = nterm item (0x55cbd38f9be0 'a') + $1 = token 'a' (0x5634721c6be0 'a') +-> $$ = nterm item (0x5634721c6be0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x55cbd38f9c30->Object::Object { 0x55cbd38f9b40, 0x55cbd38f9b90, 0x55cbd38f9be0 } -Next token is token 'a' (0x55cbd38f9c30 'a') -Shifting token 'a' (0x55cbd38f9c30 'a') +0x5634721c6c30->Object::Object { 0x5634721c6b40, 0x5634721c6b90, 0x5634721c6be0 } +Next token is token 'a' (0x5634721c6c30 'a') +Shifting token 'a' (0x5634721c6c30 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55cbd38f9c30 'a') --> $$ = nterm item (0x55cbd38f9c30 'a') + $1 = token 'a' (0x5634721c6c30 'a') +-> $$ = nterm item (0x5634721c6c30 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x55cbd38f9c80->Object::Object { 0x55cbd38f9b40, 0x55cbd38f9b90, 0x55cbd38f9be0, 0x55cbd38f9c30 } -Next token is token 'p' (0x55cbd38f9c80 'p'Exception caught: cleaning lookahead and stack -0x55cbd38f9c80->Object::~Object { 0x55cbd38f9b40, 0x55cbd38f9b90, 0x55cbd38f9be0, 0x55cbd38f9c30, 0x55cbd38f9c80 } -0x55cbd38f9c30->Object::~Object { 0x55cbd38f9b40, 0x55cbd38f9b90, 0x55cbd38f9be0, 0x55cbd38f9c30 } -0x55cbd38f9be0->Object::~Object { 0x55cbd38f9b40, 0x55cbd38f9b90, 0x55cbd38f9be0 } -0x55cbd38f9b90->Object::~Object { 0x55cbd38f9b40, 0x55cbd38f9b90 } -0x55cbd38f9b40->Object::~Object { 0x55cbd38f9b40 } 
+0x5634721c6c80->Object::Object { 0x5634721c6b40, 0x5634721c6b90, 0x5634721c6be0, 0x5634721c6c30 } +Next token is token 'p' (0x5634721c6c80 'p'Exception caught: cleaning lookahead and stack +0x5634721c6c80->Object::~Object { 0x5634721c6b40, 0x5634721c6b90, 0x5634721c6be0, 0x5634721c6c30, 0x5634721c6c80 } +0x5634721c6c30->Object::~Object { 0x5634721c6b40, 0x5634721c6b90, 0x5634721c6be0, 0x5634721c6c30 } +0x5634721c6be0->Object::~Object { 0x5634721c6b40, 0x5634721c6b90, 0x5634721c6be0 } +0x5634721c6b90->Object::~Object { 0x5634721c6b40, 0x5634721c6b90 } +0x5634721c6b40->Object::~Object { 0x5634721c6b40 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr +stderr: stdout: +729. glr-regression.at:670: testing User destructor for unresolved GLR semantic value: glr.c ... +./glr-regression.at:670: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.c glr-regr5.y exception caught: printer ./c++.at:1360: $PREPARSER ./input aaaae +./glr-regression.at:596: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: syntax error ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +726. glr-regression.at:596: ok ./c++.at:1360: $PREPARSER ./input aaaaE stderr: -stderr: -stdout: + exception caught: syntax error, unexpected end of file, expecting 'a' +stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -736. glr-regression.at:844: testing Duplicated user destructor for lookahead: glr.cc ... -./glr-regression.at:844: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y -./glr-regression.at:490: $PREPARSER ./glr-regr3 input.txt -./c++.at:1360: $PREPARSER ./input aaaaT +stdout: +./c++.at:1555: $PREPARSER ./test +./glr-regression.at:670: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr5 glr-regr5.c $LIBS stderr: +./c++.at:1360: $PREPARSER ./input aaaaT stderr: -./glr-regression.at:490: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1555: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -722. glr-regression.at:490: ok +691. c++.at:1517: ok ./c++.at:1360: $PREPARSER ./input aaaaR stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +730. glr-regression.at:671: testing User destructor for unresolved GLR semantic value: glr.cc ... +./glr-regression.at:671: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y ======== Testing with C++ standard flags: '' - ./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:844: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS -737. glr-regression.at:845: testing Duplicated user destructor for lookahead: glr2.cc ... -./glr-regression.at:845: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y -stderr: -stdout: -./glr-regression.at:597: $PREPARSER ./glr-regr4 -stderr: -./glr-regression.at:845: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS -./glr-regression.at:597: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -727. glr-regression.at:597: ok - -738. 
glr-regression.at:944: testing Incorrectly initialized location for empty right-hand side in GLR: glr.c ... -./glr-regression.at:944: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.c glr-regr8.y -stderr: -stdout: -./glr-regression.at:738: $PREPARSER ./glr-regr6 -stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:738: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -732. glr-regression.at:738: ok -./glr-regression.at:944: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr8 glr-regr8.c $LIBS -739. glr-regression.at:945: testing Incorrectly initialized location for empty right-hand side in GLR: glr.cc ... -./glr-regression.at:945: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y stderr: stdout: -./glr-regression.at:671: $PREPARSER ./glr-regr5 +./glr-regression.at:207: $PREPARSER ./glr-regr1 BPBPB stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:671: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:945: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS -730. glr-regression.at:671: ok - -740. glr-regression.at:946: testing Incorrectly initialized location for empty right-hand side in GLR: glr2.cc ... -./glr-regression.at:946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y -./glr-regression.at:946: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS +./glr-regression.at:671: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS +./glr-regression.at:207: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./glr-regression.at:843: $PREPARSER ./glr-regr7 -stderr: -memory exhausted -./glr-regression.at:843: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaas stderr: +716. glr-regression.at:207: ok stdout: -./c++.at:1361: $PREPARSER ./input aaaas stderr: -735. glr-regression.at:843: ok exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaal +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: $PREPARSER ./input stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +731. glr-regression.at:672: testing User destructor for unresolved GLR semantic value: glr2.cc ... 
+./glr-regression.at:672: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr5.cc glr-regr5.y +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaal -./c++.at:1361: $PREPARSER ./input i stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaap -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1363: $PREPARSER ./input i stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input --debug aaaap +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaap +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +732. glr-regression.at:738: testing User destructor after an error during a split parse: glr.c ... +./glr-regression.at:738: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.c glr-regr6.y +./c++.at:1363: $PREPARSER ./input --debug aaaap +./glr-regression.at:672: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr5 glr-regr5.cc $LIBS stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x555a21732b40->Object::Object { } -Next token is token 'a' (0x555a21732b40 'a') -Shifting token 'a' (0x555a21732b40 'a') +0x7ffc5f894d7f->Object::Object { } +0x7ffc5f894e60->Object::Object { 0x7ffc5f894d7f } +0x7ffc5f894d7f->Object::~Object { 0x7ffc5f894d7f, 0x7ffc5f894e60 } +Next token is token 'a' (0x7ffc5f894e60 'a') +0x7ffc5f894da0->Object::Object { 0x7ffc5f894e60 } +0x7ffc5f894e60->Object::~Object { 0x7ffc5f894da0, 0x7ffc5f894e60 } +Shifting token 'a' (0x7ffc5f894da0 'a') +0x560328692ee0->Object::Object { 0x7ffc5f894da0 } +0x7ffc5f894da0->Object::~Object { 0x560328692ee0, 0x7ffc5f894da0 } Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555a21732b40 'a') --> $$ = nterm item (0x555a21732b40 'a') +0x7ffc5f894e80->Object::Object { 0x560328692ee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x560328692ee0 'a') +-> $$ = nterm item (0x7ffc5f894e80 'a') +0x560328692ee0->Object::~Object { 0x560328692ee0, 0x7ffc5f894e80 } +0x560328692ee0->Object::Object { 0x7ffc5f894e80 } +0x7ffc5f894e80->Object::~Object { 0x560328692ee0, 0x7ffc5f894e80 } Entering state 10 Stack now 0 10 Reading a token -0x555a21732b90->Object::Object { 0x555a21732b40 } -Next token is token 'a' (0x555a21732b90 'a') -Shifting token 'a' (0x555a21732b90 'a') +0x7ffc5f894d7f->Object::Object { 0x560328692ee0 } +0x7ffc5f894e60->Object::Object { 0x560328692ee0, 0x7ffc5f894d7f } +0x7ffc5f894d7f->Object::~Object { 0x560328692ee0, 0x7ffc5f894d7f, 0x7ffc5f894e60 } +Next token is token 'a' (0x7ffc5f894e60 'a') +0x7ffc5f894da0->Object::Object { 0x560328692ee0, 0x7ffc5f894e60 } +0x7ffc5f894e60->Object::~Object { 0x560328692ee0, 0x7ffc5f894da0, 0x7ffc5f894e60 } +Shifting token 
'a' (0x7ffc5f894da0 'a') +0x560328692f00->Object::Object { 0x560328692ee0, 0x7ffc5f894da0 } +0x7ffc5f894da0->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894da0 } Entering state 1 Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555a21732b90 'a') --> $$ = nterm item (0x555a21732b90 'a') +0x7ffc5f894e80->Object::Object { 0x560328692ee0, 0x560328692f00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x560328692f00 'a') +-> $$ = nterm item (0x7ffc5f894e80 'a') +0x560328692f00->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894e80 } +0x560328692f00->Object::Object { 0x560328692ee0, 0x7ffc5f894e80 } +0x7ffc5f894e80->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894e80 } Entering state 10 Stack now 0 10 10 Reading a token -0x555a21732be0->Object::Object { 0x555a21732b40, 0x555a21732b90 } -Next token is token 'a' (0x555a21732be0 'a') -Shifting token 'a' (0x555a21732be0 'a') +0x7ffc5f894d7f->Object::Object { 0x560328692ee0, 0x560328692f00 } +0x7ffc5f894e60->Object::Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894d7f } +0x7ffc5f894d7f->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894d7f, 0x7ffc5f894e60 } +Next token is token 'a' (0x7ffc5f894e60 'a') +0x7ffc5f894da0->Object::Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894e60 } +0x7ffc5f894e60->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894da0, 0x7ffc5f894e60 } +Shifting token 'a' (0x7ffc5f894da0 'a') +0x560328692f20->Object::Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894da0 } +0x7ffc5f894da0->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894da0 } Entering state 1 Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555a21732be0 'a') --> $$ = nterm item (0x555a21732be0 'a') +0x7ffc5f894e80->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x560328692f20 'a') +-> $$ = nterm item (0x7ffc5f894e80 'a') +0x560328692f20->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894e80 } +0x560328692f20->Object::Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894e80 } +0x7ffc5f894e80->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894e80 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x555a21732c30->Object::Object { 0x555a21732b40, 0x555a21732b90, 0x555a21732be0 } -Next token is token 'a' (0x555a21732c30 'a') -Shifting token 'a' (0x555a21732c30 'a') +0x7ffc5f894d7f->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20 } +0x7ffc5f894e60->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894d7f } +0x7ffc5f894d7f->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894d7f, 0x7ffc5f894e60 } +Next token is token 'a' (0x7ffc5f894e60 'a') +0x7ffc5f894da0->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894e60 } +0x7ffc5f894e60->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894da0, 0x7ffc5f894e60 } +Shifting token 'a' (0x7ffc5f894da0 'a') +0x560328692f40->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894da0 } +0x7ffc5f894da0->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894da0 } Entering state 1 Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555a21732c30 'a') --> $$ = nterm item (0x555a21732c30 'a') +0x7ffc5f894e80->Object::Object { 0x560328692ee0, 0x560328692f00, 
0x560328692f20, 0x560328692f40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x560328692f40 'a') +-> $$ = nterm item (0x7ffc5f894e80 'a') +0x560328692f40->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894e80 } +0x560328692f40->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894e80 } +0x7ffc5f894e80->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894e80 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x555a21732c80->Object::Object { 0x555a21732b40, 0x555a21732b90, 0x555a21732be0, 0x555a21732c30 } -Next token is token 'p' (0x555a21732c80 'p'Exception caught: cleaning lookahead and stack -0x555a21732c80->Object::~Object { 0x555a21732b40, 0x555a21732b90, 0x555a21732be0, 0x555a21732c30, 0x555a21732c80 } -0x555a21732c30->Object::~Object { 0x555a21732b40, 0x555a21732b90, 0x555a21732be0, 0x555a21732c30 } -0x555a21732be0->Object::~Object { 0x555a21732b40, 0x555a21732b90, 0x555a21732be0 } -0x555a21732b90->Object::~Object { 0x555a21732b40, 0x555a21732b90 } -0x555a21732b40->Object::~Object { 0x555a21732b40 } +0x7ffc5f894d7f->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40 } +0x7ffc5f894e60->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894d7f } +0x7ffc5f894d7f->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894d7f, 0x7ffc5f894e60 } +Next token is token 'p' (0x7ffc5f894e60 'p'Exception caught: cleaning lookahead and stack +0x560328692f40->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894e60 } +0x560328692f20->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894e60 } +0x560328692f00->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894e60 } +0x560328692ee0->Object::~Object { 0x560328692ee0, 0x7ffc5f894e60 } +0x7ffc5f894e60->Object::~Object { 0x7ffc5f894e60 } exception caught: printer end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -741. glr-regression.at:1036: testing No users destructors if stack 0 deleted: glr.c ... 
-./glr-regression.at:1036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.c glr-regr9.y -stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: Starting parse Entering state 0 Stack now 0 Reading a token -0x555a21732b40->Object::Object { } -Next token is token 'a' (0x555a21732b40 'a') -Shifting token 'a' (0x555a21732b40 'a') +0x7ffc5f894d7f->Object::Object { } +0x7ffc5f894e60->Object::Object { 0x7ffc5f894d7f } +0x7ffc5f894d7f->Object::~Object { 0x7ffc5f894d7f, 0x7ffc5f894e60 } +Next token is token 'a' (0x7ffc5f894e60 'a') +0x7ffc5f894da0->Object::Object { 0x7ffc5f894e60 } +0x7ffc5f894e60->Object::~Object { 0x7ffc5f894da0, 0x7ffc5f894e60 } +Shifting token 'a' (0x7ffc5f894da0 'a') +0x560328692ee0->Object::Object { 0x7ffc5f894da0 } +0x7ffc5f894da0->Object::~Object { 0x560328692ee0, 0x7ffc5f894da0 } Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555a21732b40 'a') --> $$ = nterm item (0x555a21732b40 'a') +0x7ffc5f894e80->Object::Object { 0x560328692ee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x560328692ee0 'a') +-> $$ = nterm item (0x7ffc5f894e80 'a') +0x560328692ee0->Object::~Object { 0x560328692ee0, 0x7ffc5f894e80 } +0x560328692ee0->Object::Object { 0x7ffc5f894e80 } +0x7ffc5f894e80->Object::~Object { 0x560328692ee0, 0x7ffc5f894e80 } Entering state 10 Stack now 0 10 Reading a token -0x555a21732b90->Object::Object { 0x555a21732b40 } -Next token is token 'a' (0x555a21732b90 'a') -Shifting token 'a' (0x555a21732b90 'a') +0x7ffc5f894d7f->Object::Object { 0x560328692ee0 } +0x7ffc5f894e60->Object::Object { 0x560328692ee0, 0x7ffc5f894d7f } +0x7ffc5f894d7f->Object::~Object { 0x560328692ee0, 0x7ffc5f894d7f, 0x7ffc5f894e60 } +Next token is token 'a' (0x7ffc5f894e60 'a') +0x7ffc5f894da0->Object::Object { 0x560328692ee0, 0x7ffc5f894e60 } +0x7ffc5f894e60->Object::~Object { 0x560328692ee0, 0x7ffc5f894da0, 0x7ffc5f894e60 } +Shifting token 'a' (0x7ffc5f894da0 'a') +0x560328692f00->Object::Object { 0x560328692ee0, 0x7ffc5f894da0 } +0x7ffc5f894da0->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894da0 } Entering state 1 Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555a21732b90 'a') --> $$ = nterm item (0x555a21732b90 'a') +0x7ffc5f894e80->Object::Object { 0x560328692ee0, 0x560328692f00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x560328692f00 'a') +-> $$ = nterm item (0x7ffc5f894e80 'a') +0x560328692f00->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894e80 } +0x560328692f00->Object::Object { 0x560328692ee0, 0x7ffc5f894e80 } +0x7ffc5f894e80->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894e80 } Entering state 10 Stack now 0 10 10 Reading a token -0x555a21732be0->Object::Object { 0x555a21732b40, 0x555a21732b90 } -Next token is token 'a' (0x555a21732be0 'a') -Shifting token 'a' (0x555a21732be0 'a') +0x7ffc5f894d7f->Object::Object { 0x560328692ee0, 0x560328692f00 } +0x7ffc5f894e60->Object::Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894d7f } +0x7ffc5f894d7f->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894d7f, 0x7ffc5f894e60 } +Next token is token 'a' (0x7ffc5f894e60 'a') +0x7ffc5f894da0->Object::Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894e60 } +0x7ffc5f894e60->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894da0, 0x7ffc5f894e60 } +Shifting token 'a' (0x7ffc5f894da0 'a') +0x560328692f20->Object::Object { 
0x560328692ee0, 0x560328692f00, 0x7ffc5f894da0 } +0x7ffc5f894da0->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894da0 } Entering state 1 Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555a21732be0 'a') --> $$ = nterm item (0x555a21732be0 'a') +0x7ffc5f894e80->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x560328692f20 'a') +-> $$ = nterm item (0x7ffc5f894e80 'a') +0x560328692f20->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894e80 } +0x560328692f20->Object::Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894e80 } +0x7ffc5f894e80->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894e80 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x555a21732c30->Object::Object { 0x555a21732b40, 0x555a21732b90, 0x555a21732be0 } -Next token is token 'a' (0x555a21732c30 'a') -Shifting token 'a' (0x555a21732c30 'a') +0x7ffc5f894d7f->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20 } +0x7ffc5f894e60->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894d7f } +0x7ffc5f894d7f->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894d7f, 0x7ffc5f894e60 } +Next token is token 'a' (0x7ffc5f894e60 'a') +0x7ffc5f894da0->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894e60 } +0x7ffc5f894e60->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894da0, 0x7ffc5f894e60 } +Shifting token 'a' (0x7ffc5f894da0 'a') +0x560328692f40->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894da0 } +0x7ffc5f894da0->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894da0 } Entering state 1 Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555a21732c30 'a') --> $$ = nterm item (0x555a21732c30 'a') +0x7ffc5f894e80->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x560328692f40 'a') +-> $$ = nterm item (0x7ffc5f894e80 'a') +0x560328692f40->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894e80 } +0x560328692f40->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894e80 } +0x7ffc5f894e80->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894e80 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x555a21732c80->Object::Object { 0x555a21732b40, 0x555a21732b90, 0x555a21732be0, 0x555a21732c30 } -Next token is token 'p' (0x555a21732c80 'p'Exception caught: cleaning lookahead and stack -0x555a21732c80->Object::~Object { 0x555a21732b40, 0x555a21732b90, 0x555a21732be0, 0x555a21732c30, 0x555a21732c80 } -0x555a21732c30->Object::~Object { 0x555a21732b40, 0x555a21732b90, 0x555a21732be0, 0x555a21732c30 } -0x555a21732be0->Object::~Object { 0x555a21732b40, 0x555a21732b90, 0x555a21732be0 } -0x555a21732b90->Object::~Object { 0x555a21732b40, 0x555a21732b90 } -0x555a21732b40->Object::~Object { 0x555a21732b40 } +0x7ffc5f894d7f->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40 } +0x7ffc5f894e60->Object::Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894d7f } +0x7ffc5f894d7f->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894d7f, 0x7ffc5f894e60 } +Next 
token is token 'p' (0x7ffc5f894e60 'p'Exception caught: cleaning lookahead and stack +0x560328692f40->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x560328692f40, 0x7ffc5f894e60 } +0x560328692f20->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x560328692f20, 0x7ffc5f894e60 } +0x560328692f00->Object::~Object { 0x560328692ee0, 0x560328692f00, 0x7ffc5f894e60 } +0x560328692ee0->Object::~Object { 0x560328692ee0, 0x7ffc5f894e60 } +0x7ffc5f894e60->Object::~Object { 0x7ffc5f894e60 } exception caught: printer end { } -./c++.at:1361: grep '^exception caught: printer$' stderr -./glr-regression.at:739: $PREPARSER ./glr-regr6 +./c++.at:1363: grep '^exception caught: printer$' stderr stdout: -stderr: exception caught: printer -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./c++.at:1361: $PREPARSER ./input aaaae -./glr-regression.at:739: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaae stderr: exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -733. glr-regression.at:739: ok -./c++.at:1361: $PREPARSER ./input aaaaE +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:738: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr6 glr-regr6.c $LIBS +./c++.at:1363: $PREPARSER ./input aaaaE +stderr: +stdout: stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./glr-regression.at:1036: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr9 glr-regr9.c $LIBS -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - -./c++.at:1361: $PREPARSER ./input aaaaT +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:356: $PREPARSER ./glr-regr2a input1.txt stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR +./c++.at:1363: $PREPARSER ./input aaaaT +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -742. glr-regression.at:1037: testing No users destructors if stack 0 deleted: glr.cc ... -./glr-regression.at:1037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y -./glr-regression.at:1037: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaR +./glr-regression.at:356: $PREPARSER ./glr-regr2a input2.txt stderr: -stdout: -./glr-regression.at:944: $PREPARSER ./glr-regr8 stderr: -./glr-regression.at:944: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -738. glr-regression.at:944: ok +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:356: $PREPARSER ./glr-regr2a input3.txt +stderr: +./glr-regression.at:356: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +719. 
glr-regression.at:356: ok +733. glr-regression.at:739: testing User destructor after an error during a split parse: glr.cc ... +./glr-regression.at:739: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y stderr: stdout: -./c++.at:1363: $PREPARSER ./input aaaas +./c++.at:1361: $PREPARSER ./input aaaas stderr: exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaal stderr: exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:739: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS +./c++.at:1361: $PREPARSER ./input i stderr: exception caught: initial-action -743. glr-regression.at:1038: testing No users destructors if stack 0 deleted: glr2.cc ... -./glr-regression.at:1038: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaap stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: -./c++.at:1363: $PREPARSER ./input --debug aaaap -./glr-regression.at:844: $PREPARSER ./glr-regr7 +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:670: $PREPARSER ./glr-regr5 stderr: +./c++.at:1361: $PREPARSER ./input --debug aaaap +Ambiguity detected. 
+Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +./glr-regression.at:670: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffe317f653f->Object::Object { } -0x7ffe317f6610->Object::Object { 0x7ffe317f653f } -0x7ffe317f653f->Object::~Object { 0x7ffe317f653f, 0x7ffe317f6610 } -Next token is token 'a' (0x7ffe317f6610 'a') -0x7ffe317f6560->Object::Object { 0x7ffe317f6610 } -0x7ffe317f6610->Object::~Object { 0x7ffe317f6560, 0x7ffe317f6610 } -Shifting token 'a' (0x7ffe317f6560 'a') -0x565506d3dee0->Object::Object { 0x7ffe317f6560 } -0x7ffe317f6560->Object::~Object { 0x565506d3dee0, 0x7ffe317f6560 } +0x55c95a767b40->Object::Object { } +Next token is token 'a' (0x55c95a767b40 'a') +Shifting token 'a' (0x55c95a767b40 'a') Entering state 1 Stack now 0 1 -0x7ffe317f6630->Object::Object { 0x565506d3dee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x565506d3dee0 'a') --> $$ = nterm item (0x7ffe317f6630 'a') -0x565506d3dee0->Object::~Object { 0x565506d3dee0, 0x7ffe317f6630 } -0x565506d3dee0->Object::Object { 0x7ffe317f6630 } -0x7ffe317f6630->Object::~Object { 0x565506d3dee0, 0x7ffe317f6630 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c95a767b40 'a') +-> $$ = nterm item (0x55c95a767b40 'a') Entering state 10 Stack now 0 10 Reading a token -0x7ffe317f653f->Object::Object { 0x565506d3dee0 } -0x7ffe317f6610->Object::Object { 0x565506d3dee0, 0x7ffe317f653f } -0x7ffe317f653f->Object::~Object { 0x565506d3dee0, 0x7ffe317f653f, 0x7ffe317f6610 } -Next token is token 'a' (0x7ffe317f6610 'a') -0x7ffe317f6560->Object::Object { 0x565506d3dee0, 0x7ffe317f6610 } -0x7ffe317f6610->Object::~Object { 0x565506d3dee0, 0x7ffe317f6560, 0x7ffe317f6610 } -Shifting token 'a' (0x7ffe317f6560 'a') -0x565506d3df00->Object::Object { 0x565506d3dee0, 0x7ffe317f6560 } -0x7ffe317f6560->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6560 } +0x55c95a767b90->Object::Object { 0x55c95a767b40 } +Next token is token 'a' (0x55c95a767b90 'a') +Shifting token 'a' (0x55c95a767b90 'a') Entering state 1 Stack now 0 10 1 -0x7ffe317f6630->Object::Object { 0x565506d3dee0, 0x565506d3df00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x565506d3df00 'a') --> $$ = nterm item (0x7ffe317f6630 'a') -0x565506d3df00->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6630 } -0x565506d3df00->Object::Object { 0x565506d3dee0, 0x7ffe317f6630 } -0x7ffe317f6630->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6630 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c95a767b90 'a') +-> $$ = nterm item (0x55c95a767b90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x7ffe317f653f->Object::Object { 0x565506d3dee0, 0x565506d3df00 } -0x7ffe317f6610->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f653f } -0x7ffe317f653f->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f653f, 0x7ffe317f6610 } -Next token is token 'a' (0x7ffe317f6610 'a') -0x7ffe317f6560->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6610 } -0x7ffe317f6610->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6560, 0x7ffe317f6610 } -Shifting token 'a' (0x7ffe317f6560 'a') -0x565506d3df20->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6560 } -0x7ffe317f6560->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6560 } +0x55c95a767be0->Object::Object { 0x55c95a767b40, 0x55c95a767b90 } +Next token is token 'a' 
(0x55c95a767be0 'a') +Shifting token 'a' (0x55c95a767be0 'a') Entering state 1 Stack now 0 10 10 1 -0x7ffe317f6630->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x565506d3df20 'a') --> $$ = nterm item (0x7ffe317f6630 'a') -0x565506d3df20->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6630 } -0x565506d3df20->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6630 } -0x7ffe317f6630->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6630 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c95a767be0 'a') +-> $$ = nterm item (0x55c95a767be0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffe317f653f->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20 } -0x7ffe317f6610->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f653f } -0x7ffe317f653f->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f653f, 0x7ffe317f6610 } -Next token is token 'a' (0x7ffe317f6610 'a') -0x7ffe317f6560->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6610 } -0x7ffe317f6610->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6560, 0x7ffe317f6610 } -Shifting token 'a' (0x7ffe317f6560 'a') -0x565506d3df40->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6560 } -0x7ffe317f6560->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40, 0x7ffe317f6560 } +0x55c95a767c30->Object::Object { 0x55c95a767b40, 0x55c95a767b90, 0x55c95a767be0 } +Next token is token 'a' (0x55c95a767c30 'a') +Shifting token 'a' (0x55c95a767c30 'a') Entering state 1 Stack now 0 10 10 10 1 -0x7ffe317f6630->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x565506d3df40 'a') --> $$ = nterm item (0x7ffe317f6630 'a') -0x565506d3df40->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40, 0x7ffe317f6630 } -0x565506d3df40->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6630 } -0x7ffe317f6630->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40, 0x7ffe317f6630 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c95a767c30 'a') +-> $$ = nterm item (0x55c95a767c30 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffe317f653f->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40 } -0x7ffe317f6610->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40, 0x7ffe317f653f } -0x7ffe317f653f->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40, 0x7ffe317f653f, 0x7ffe317f6610 } -Next token is token 'p' (0x7ffe317f6610 'p'Exception caught: cleaning lookahead and stack -0x565506d3df40->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40, 0x7ffe317f6610 } -0x565506d3df20->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6610 } -0x565506d3df00->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6610 } -0x565506d3dee0->Object::~Object { 0x565506d3dee0, 0x7ffe317f6610 } -0x7ffe317f6610->Object::~Object { 0x7ffe317f6610 } +0x55c95a767c80->Object::Object { 0x55c95a767b40, 0x55c95a767b90, 0x55c95a767be0, 0x55c95a767c30 } +Next token is token 'p' (0x55c95a767c80 'p'Exception caught: cleaning lookahead and stack 
+0x55c95a767c80->Object::~Object { 0x55c95a767b40, 0x55c95a767b90, 0x55c95a767be0, 0x55c95a767c30, 0x55c95a767c80 } +0x55c95a767c30->Object::~Object { 0x55c95a767b40, 0x55c95a767b90, 0x55c95a767be0, 0x55c95a767c30 } +0x55c95a767be0->Object::~Object { 0x55c95a767b40, 0x55c95a767b90, 0x55c95a767be0 } +0x55c95a767b90->Object::~Object { 0x55c95a767b40, 0x55c95a767b90 } +0x55c95a767b40->Object::~Object { 0x55c95a767b40 } exception caught: printer end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -memory exhausted -./glr-regression.at:844: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffe317f653f->Object::Object { } -0x7ffe317f6610->Object::Object { 0x7ffe317f653f } -0x7ffe317f653f->Object::~Object { 0x7ffe317f653f, 0x7ffe317f6610 } -Next token is token 'a' (0x7ffe317f6610 'a') -0x7ffe317f6560->Object::Object { 0x7ffe317f6610 } -0x7ffe317f6610->Object::~Object { 0x7ffe317f6560, 0x7ffe317f6610 } -Shifting token 'a' (0x7ffe317f6560 'a') -0x565506d3dee0->Object::Object { 0x7ffe317f6560 } -0x7ffe317f6560->Object::~Object { 0x565506d3dee0, 0x7ffe317f6560 } +0x55c95a767b40->Object::Object { } +Next token is token 'a' (0x55c95a767b40 'a') +Shifting token 'a' (0x55c95a767b40 'a') Entering state 1 Stack now 0 1 -0x7ffe317f6630->Object::Object { 0x565506d3dee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x565506d3dee0 'a') --> $$ = nterm item (0x7ffe317f6630 'a') -0x565506d3dee0->Object::~Object { 0x565506d3dee0, 0x7ffe317f6630 } -0x565506d3dee0->Object::Object { 0x7ffe317f6630 } -0x7ffe317f6630->Object::~Object { 0x565506d3dee0, 0x7ffe317f6630 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c95a767b40 'a') +-> $$ = nterm item (0x55c95a767b40 'a') Entering state 10 Stack now 0 10 Reading a token -0x7ffe317f653f->Object::Object { 0x565506d3dee0 } -0x7ffe317f6610->Object::Object { 0x565506d3dee0, 0x7ffe317f653f } -0x7ffe317f653f->Object::~Object { 0x565506d3dee0, 0x7ffe317f653f, 0x7ffe317f6610 } -Next token is token 'a' (0x7ffe317f6610 'a') -0x7ffe317f6560->Object::Object { 0x565506d3dee0, 0x7ffe317f6610 } -0x7ffe317f6610->Object::~Object { 0x565506d3dee0, 0x7ffe317f6560, 0x7ffe317f6610 } -Shifting token 'a' (0x7ffe317f6560 'a') -0x565506d3df00->Object::Object { 0x565506d3dee0, 0x7ffe317f6560 } -0x7ffe317f6560->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6560 } +0x55c95a767b90->Object::Object { 0x55c95a767b40 } +Next token is token 'a' (0x55c95a767b90 'a') +Shifting token 'a' (0x55c95a767b90 'a') Entering state 1 Stack now 0 10 1 -0x7ffe317f6630->Object::Object { 0x565506d3dee0, 0x565506d3df00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x565506d3df00 'a') --> $$ = nterm item (0x7ffe317f6630 'a') -0x565506d3df00->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6630 } -0x565506d3df00->Object::Object { 0x565506d3dee0, 0x7ffe317f6630 } -0x7ffe317f6630->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6630 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c95a767b90 'a') +-> $$ = nterm item (0x55c95a767b90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x7ffe317f653f->Object::Object { 0x565506d3dee0, 0x565506d3df00 } -0x7ffe317f6610->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f653f } -0x7ffe317f653f->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f653f, 0x7ffe317f6610 } 
-Next token is token 'a' (0x7ffe317f6610 'a') -0x7ffe317f6560->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6610 } -0x7ffe317f6610->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6560, 0x7ffe317f6610 } -Shifting token 'a' (0x7ffe317f6560 'a') -0x565506d3df20->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6560 } -0x7ffe317f6560->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6560 } +0x55c95a767be0->Object::Object { 0x55c95a767b40, 0x55c95a767b90 } +Next token is token 'a' (0x55c95a767be0 'a') +Shifting token 'a' (0x55c95a767be0 'a') Entering state 1 Stack now 0 10 10 1 -0x7ffe317f6630->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x565506d3df20 'a') --> $$ = nterm item (0x7ffe317f6630 'a') -0x565506d3df20->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6630 } -0x565506d3df20->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6630 } -0x7ffe317f6630->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6630 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c95a767be0 'a') +-> $$ = nterm item (0x55c95a767be0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffe317f653f->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20 } -0x7ffe317f6610->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f653f } -0x7ffe317f653f->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f653f, 0x7ffe317f6610 } -Next token is token 'a' (0x7ffe317f6610 'a') -0x7ffe317f6560->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6610 } -0x7ffe317f6610->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6560, 0x7ffe317f6610 } -Shifting token 'a' (0x7ffe317f6560 'a') -0x565506d3df40->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6560 } -0x7ffe317f6560->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40, 0x7ffe317f6560 } +0x55c95a767c30->Object::Object { 0x55c95a767b40, 0x55c95a767b90, 0x55c95a767be0 } +Next token is token 'a' (0x55c95a767c30 'a') +Shifting token 'a' (0x55c95a767c30 'a') Entering state 1 Stack now 0 10 10 10 1 -0x7ffe317f6630->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x565506d3df40 'a') --> $$ = nterm item (0x7ffe317f6630 'a') -0x565506d3df40->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40, 0x7ffe317f6630 } -0x565506d3df40->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6630 } -0x7ffe317f6630->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40, 0x7ffe317f6630 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c95a767c30 'a') +-> $$ = nterm item (0x55c95a767c30 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffe317f653f->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40 } -0x7ffe317f6610->Object::Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40, 0x7ffe317f653f } -0x7ffe317f653f->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x565506d3df40, 0x7ffe317f653f, 0x7ffe317f6610 } -Next token is token 'p' (0x7ffe317f6610 'p'Exception caught: cleaning lookahead and stack -0x565506d3df40->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 
0x565506d3df20, 0x565506d3df40, 0x7ffe317f6610 } -0x565506d3df20->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x565506d3df20, 0x7ffe317f6610 } -0x565506d3df00->Object::~Object { 0x565506d3dee0, 0x565506d3df00, 0x7ffe317f6610 } -0x565506d3dee0->Object::~Object { 0x565506d3dee0, 0x7ffe317f6610 } -0x7ffe317f6610->Object::~Object { 0x7ffe317f6610 } +0x55c95a767c80->Object::Object { 0x55c95a767b40, 0x55c95a767b90, 0x55c95a767be0, 0x55c95a767c30 } +Next token is token 'p' (0x55c95a767c80 'p'Exception caught: cleaning lookahead and stack +0x55c95a767c80->Object::~Object { 0x55c95a767b40, 0x55c95a767b90, 0x55c95a767be0, 0x55c95a767c30, 0x55c95a767c80 } +0x55c95a767c30->Object::~Object { 0x55c95a767b40, 0x55c95a767b90, 0x55c95a767be0, 0x55c95a767c30 } +0x55c95a767be0->Object::~Object { 0x55c95a767b40, 0x55c95a767b90, 0x55c95a767be0 } +0x55c95a767b90->Object::~Object { 0x55c95a767b40, 0x55c95a767b90 } +0x55c95a767b40->Object::~Object { 0x55c95a767b40 } exception caught: printer end { } -./glr-regression.at:1038: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS -./c++.at:1363: grep '^exception caught: printer$' stderr +./c++.at:1361: grep '^exception caught: printer$' stderr +729. glr-regression.at:670: ok stdout: -736. glr-regression.at:844: ok exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae +./c++.at:1361: $PREPARSER ./input aaaae stderr: exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaE +stderr: stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaT +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./glr-regression.at:490: $PREPARSER ./glr-regr3 input.txt +./c++.at:1361: $PREPARSER ./input aaaaT stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR -744. glr-regression.at:1102: testing Corrupted semantic options if user action cuts parse: glr.c ... -./glr-regression.at:1102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.c glr-regr10.y +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:490: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR +stderr: +722. glr-regression.at:490: ok +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:1102: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr10 glr-regr10.c $LIBS +./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +734. glr-regression.at:740: testing User destructor after an error during a split parse: glr2.cc ... 
+./glr-regression.at:740: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr6.cc glr-regr6.y + +./glr-regression.at:740: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr6 glr-regr6.cc $LIBS +735. glr-regression.at:843: testing Duplicated user destructor for lookahead: glr.c ... +./glr-regression.at:843: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.c glr-regr7.y stderr: stdout: -./c++.at:858: $PREPARSER ./input +./glr-regression.at:597: $PREPARSER ./glr-regr4 stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:597: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +727. glr-regression.at:597: ok + +./glr-regression.at:843: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr7 glr-regr7.c $LIBS stderr: stdout: -./glr-regression.at:598: $PREPARSER ./glr-regr4 +./glr-regression.at:738: $PREPARSER ./glr-regr6 stderr: -./glr-regression.at:598: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -728. glr-regression.at:598: ok +Ambiguity detected. +Option 1, + start -> + 'a' +Option 2, + start -> + 'a' + +syntax is ambiguous +./glr-regression.at:738: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stderr: +736. glr-regression.at:844: testing Duplicated user destructor for lookahead: glr.cc ... +./glr-regression.at:844: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y stdout: +./c++.at:857: $PREPARSER ./input +stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +732. glr-regression.at:738: ok +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:844: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS + +stderr: stdout: -./c++.at:1360: $PREPARSER ./input aaaas +./c++.at:1362: $PREPARSER ./input aaaas stderr: +737. glr-regression.at:845: testing Duplicated user destructor for lookahead: glr2.cc ... 
exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in -./c++.at:1360: $PREPARSER ./input aaaal +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:845: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr7.cc glr-regr7.y +./c++.at:1362: $PREPARSER ./input aaaal stderr: exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input i +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input i stderr: exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1360: $PREPARSER ./input aaaap -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaap stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./glr-regression.at:1036: $PREPARSER ./glr-regr9 -./c++.at:1360: $PREPARSER ./input --debug aaaap -stdout: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:845: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr7 glr-regr7.cc $LIBS +./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: -./c++.at:1066: $PREPARSER ./input < in Starting parse Entering state 0 Stack now 0 Reading a token -0x55b69f0ceb40->Object::Object { } -Next token is token 'a' (0x55b69f0ceb40 'a') -Shifting token 'a' (0x55b69f0ceb40 'a') +0x7fff3597de37->Object::Object { } +0x7fff3597dec0->Object::Object { 0x7fff3597de37 } +0x7fff3597de37->Object::~Object { 0x7fff3597de37, 0x7fff3597dec0 } +Next token is token 'a' (0x7fff3597dec0 'a') +0x7fff3597de10->Object::Object { 0x7fff3597dec0 } +0x7fff3597dec0->Object::~Object { 0x7fff3597de10, 0x7fff3597dec0 } +Shifting token 'a' (0x7fff3597de10 'a') +0x556b4c9edee0->Object::Object { 0x7fff3597de10 } +0x7fff3597de10->Object::~Object { 0x556b4c9edee0, 0x7fff3597de10 } Entering state 2 Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55b69f0ceb40 'a') --> $$ = nterm item (0x55b69f0ceb40 'a') +0x7fff3597dee0->Object::Object { 0x556b4c9edee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556b4c9edee0 'a') +-> $$ = nterm item (0x7fff3597dee0 'a') +0x556b4c9edee0->Object::~Object { 0x556b4c9edee0, 0x7fff3597dee0 } +0x556b4c9edee0->Object::Object { 0x7fff3597dee0 } +0x7fff3597dee0->Object::~Object { 0x556b4c9edee0, 0x7fff3597dee0 } Entering state 11 Stack now 0 11 Reading a token -0x55b69f0ceb90->Object::Object { 0x55b69f0ceb40 } -Next token is token 'a' (0x55b69f0ceb90 'a') -Shifting token 'a' (0x55b69f0ceb90 'a') +0x7fff3597de37->Object::Object { 0x556b4c9edee0 } +0x7fff3597dec0->Object::Object { 0x556b4c9edee0, 0x7fff3597de37 } +0x7fff3597de37->Object::~Object { 0x556b4c9edee0, 0x7fff3597de37, 0x7fff3597dec0 } +Next token is token 'a' (0x7fff3597dec0 'a') +0x7fff3597de10->Object::Object { 0x556b4c9edee0, 0x7fff3597dec0 } +0x7fff3597dec0->Object::~Object { 0x556b4c9edee0, 0x7fff3597de10, 0x7fff3597dec0 } +Shifting token 'a' (0x7fff3597de10 'a') +0x556b4c9edf00->Object::Object { 
0x556b4c9edee0, 0x7fff3597de10 } +0x7fff3597de10->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597de10 } Entering state 2 Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55b69f0ceb90 'a') --> $$ = nterm item (0x55b69f0ceb90 'a') +0x7fff3597dee0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556b4c9edf00 'a') +-> $$ = nterm item (0x7fff3597dee0 'a') +0x556b4c9edf00->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597dee0 } +0x556b4c9edf00->Object::Object { 0x556b4c9edee0, 0x7fff3597dee0 } +0x7fff3597dee0->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597dee0 } Entering state 11 Stack now 0 11 11 Reading a token -0x55b69f0cebe0->Object::Object { 0x55b69f0ceb40, 0x55b69f0ceb90 } -Next token is token 'a' (0x55b69f0cebe0 'a') -Shifting token 'a' (0x55b69f0cebe0 'a') +0x7fff3597de37->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00 } +0x7fff3597dec0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597de37 } +0x7fff3597de37->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597de37, 0x7fff3597dec0 } +Next token is token 'a' (0x7fff3597dec0 'a') +0x7fff3597de10->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597dec0 } +0x7fff3597dec0->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597de10, 0x7fff3597dec0 } +Shifting token 'a' (0x7fff3597de10 'a') +0x556b4c9edf20->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597de10 } +0x7fff3597de10->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597de10 } Entering state 2 Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55b69f0cebe0 'a') --> $$ = nterm item (0x55b69f0cebe0 'a') +0x7fff3597dee0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556b4c9edf20 'a') +-> $$ = nterm item (0x7fff3597dee0 'a') +0x556b4c9edf20->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597dee0 } +0x556b4c9edf20->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597dee0 } +0x7fff3597dee0->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597dee0 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x55b69f0cec30->Object::Object { 0x55b69f0ceb40, 0x55b69f0ceb90, 0x55b69f0cebe0 } -Next token is token 'a' (0x55b69f0cec30 'a') -Shifting token 'a' (0x55b69f0cec30 'a') +0x7fff3597de37->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20 } +0x7fff3597dec0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597de37 } +0x7fff3597de37->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597de37, 0x7fff3597dec0 } +Next token is token 'a' (0x7fff3597dec0 'a') +0x7fff3597de10->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597dec0 } +0x7fff3597dec0->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597de10, 0x7fff3597dec0 } +Shifting token 'a' (0x7fff3597de10 'a') +0x556b4c9edf40->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597de10 } +0x7fff3597de10->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597de10 } Entering state 2 Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55b69f0cec30 'a') --> $$ = nterm item (0x55b69f0cec30 'a') +0x7fff3597dee0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40 } +Reducing stack by rule 4 
(line 142): + $1 = token 'a' (0x556b4c9edf40 'a') +-> $$ = nterm item (0x7fff3597dee0 'a') +0x556b4c9edf40->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597dee0 } +0x556b4c9edf40->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597dee0 } +0x7fff3597dee0->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597dee0 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x55b69f0cec80->Object::Object { 0x55b69f0ceb40, 0x55b69f0ceb90, 0x55b69f0cebe0, 0x55b69f0cec30 } -Next token is token 'p' (0x55b69f0cec80 'p'Exception caught: cleaning lookahead and stack -0x55b69f0cec80->Object::~Object { 0x55b69f0ceb40, 0x55b69f0ceb90, 0x55b69f0cebe0, 0x55b69f0cec30, 0x55b69f0cec80 } -0x55b69f0cec30->Object::~Object { 0x55b69f0ceb40, 0x55b69f0ceb90, 0x55b69f0cebe0, 0x55b69f0cec30 } -0x55b69f0cebe0->Object::~Object { 0x55b69f0ceb40, 0x55b69f0ceb90, 0x55b69f0cebe0 } -0x55b69f0ceb90->Object::~Object { 0x55b69f0ceb40, 0x55b69f0ceb90 } -0x55b69f0ceb40->Object::~Object { 0x55b69f0ceb40 } +0x7fff3597de37->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40 } +0x7fff3597dec0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597de37 } +0x7fff3597de37->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597de37, 0x7fff3597dec0 } +Next token is token 'p' (0x7fff3597dec0 'p'Exception caught: cleaning lookahead and stack +0x556b4c9edf40->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597dec0 } +0x556b4c9edf20->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597dec0 } +0x556b4c9edf00->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597dec0 } +0x556b4c9edee0->Object::~Object { 0x556b4c9edee0, 0x7fff3597dec0 } +0x7fff3597dec0->Object::~Object { 0x7fff3597dec0 } exception caught: printer end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -memory exhausted -./c++.at:1362: $PREPARSER ./input aaaas -./glr-regression.at:1036: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -exception caught: reduction Starting parse Entering state 0 Stack now 0 Reading a token -0x55b69f0ceb40->Object::Object { } -Next token is token 'a' (0x55b69f0ceb40 'a') -Shifting token 'a' (0x55b69f0ceb40 'a') +0x7fff3597de37->Object::Object { } +0x7fff3597dec0->Object::Object { 0x7fff3597de37 } +0x7fff3597de37->Object::~Object { 0x7fff3597de37, 0x7fff3597dec0 } +Next token is token 'a' (0x7fff3597dec0 'a') +0x7fff3597de10->Object::Object { 0x7fff3597dec0 } +0x7fff3597dec0->Object::~Object { 0x7fff3597de10, 0x7fff3597dec0 } +Shifting token 'a' (0x7fff3597de10 'a') +0x556b4c9edee0->Object::Object { 0x7fff3597de10 } +0x7fff3597de10->Object::~Object { 0x556b4c9edee0, 0x7fff3597de10 } Entering state 2 Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55b69f0ceb40 'a') --> $$ = nterm item (0x55b69f0ceb40 'a') +0x7fff3597dee0->Object::Object { 0x556b4c9edee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556b4c9edee0 'a') +-> $$ = nterm item (0x7fff3597dee0 'a') +0x556b4c9edee0->Object::~Object { 0x556b4c9edee0, 0x7fff3597dee0 } +0x556b4c9edee0->Object::Object { 0x7fff3597dee0 } +0x7fff3597dee0->Object::~Object { 0x556b4c9edee0, 0x7fff3597dee0 } Entering state 11 Stack now 0 11 
Reading a token -0x55b69f0ceb90->Object::Object { 0x55b69f0ceb40 } -Next token is token 'a' (0x55b69f0ceb90 'a') -Shifting token 'a' (0x55b69f0ceb90 'a') +0x7fff3597de37->Object::Object { 0x556b4c9edee0 } +0x7fff3597dec0->Object::Object { 0x556b4c9edee0, 0x7fff3597de37 } +0x7fff3597de37->Object::~Object { 0x556b4c9edee0, 0x7fff3597de37, 0x7fff3597dec0 } +Next token is token 'a' (0x7fff3597dec0 'a') +0x7fff3597de10->Object::Object { 0x556b4c9edee0, 0x7fff3597dec0 } +0x7fff3597dec0->Object::~Object { 0x556b4c9edee0, 0x7fff3597de10, 0x7fff3597dec0 } +Shifting token 'a' (0x7fff3597de10 'a') +0x556b4c9edf00->Object::Object { 0x556b4c9edee0, 0x7fff3597de10 } +0x7fff3597de10->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597de10 } Entering state 2 Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55b69f0ceb90 'a') --> $$ = nterm item (0x55b69f0ceb90 'a') +0x7fff3597dee0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556b4c9edf00 'a') +-> $$ = nterm item (0x7fff3597dee0 'a') +0x556b4c9edf00->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597dee0 } +0x556b4c9edf00->Object::Object { 0x556b4c9edee0, 0x7fff3597dee0 } +0x7fff3597dee0->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597dee0 } Entering state 11 Stack now 0 11 11 Reading a token -0x55b69f0cebe0->Object::Object { 0x55b69f0ceb40, 0x55b69f0ceb90 } -Next token is token 'a' (0x55b69f0cebe0 'a') -Shifting token 'a' (0x55b69f0cebe0 'a') +0x7fff3597de37->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00 } +0x7fff3597dec0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597de37 } +0x7fff3597de37->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597de37, 0x7fff3597dec0 } +Next token is token 'a' (0x7fff3597dec0 'a') +0x7fff3597de10->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597dec0 } +0x7fff3597dec0->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597de10, 0x7fff3597dec0 } +Shifting token 'a' (0x7fff3597de10 'a') +0x556b4c9edf20->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597de10 } +0x7fff3597de10->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597de10 } Entering state 2 Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55b69f0cebe0 'a') --> $$ = nterm item (0x55b69f0cebe0 'a') +0x7fff3597dee0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556b4c9edf20 'a') +-> $$ = nterm item (0x7fff3597dee0 'a') +0x556b4c9edf20->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597dee0 } +0x556b4c9edf20->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597dee0 } +0x7fff3597dee0->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597dee0 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x55b69f0cec30->Object::Object { 0x55b69f0ceb40, 0x55b69f0ceb90, 0x55b69f0cebe0 } -Next token is token 'a' (0x55b69f0cec30 'a') -Shifting token 'a' (0x55b69f0cec30 'a') +0x7fff3597de37->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20 } +0x7fff3597dec0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597de37 } +0x7fff3597de37->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597de37, 0x7fff3597dec0 } +Next token is token 'a' (0x7fff3597dec0 'a') +0x7fff3597de10->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597dec0 } 
+0x7fff3597dec0->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597de10, 0x7fff3597dec0 } +Shifting token 'a' (0x7fff3597de10 'a') +0x556b4c9edf40->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597de10 } +0x7fff3597de10->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597de10 } Entering state 2 Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55b69f0cec30 'a') --> $$ = nterm item (0x55b69f0cec30 'a') +0x7fff3597dee0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x556b4c9edf40 'a') +-> $$ = nterm item (0x7fff3597dee0 'a') +0x556b4c9edf40->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597dee0 } +0x556b4c9edf40->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597dee0 } +0x7fff3597dee0->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597dee0 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x55b69f0cec80->Object::Object { 0x55b69f0ceb40, 0x55b69f0ceb90, 0x55b69f0cebe0, 0x55b69f0cec30 } -Next token is token 'p' (0x55b69f0cec80 'p'Exception caught: cleaning lookahead and stack -0x55b69f0cec80->Object::~Object { 0x55b69f0ceb40, 0x55b69f0ceb90, 0x55b69f0cebe0, 0x55b69f0cec30, 0x55b69f0cec80 } -0x55b69f0cec30->Object::~Object { 0x55b69f0ceb40, 0x55b69f0ceb90, 0x55b69f0cebe0, 0x55b69f0cec30 } -0x55b69f0cebe0->Object::~Object { 0x55b69f0ceb40, 0x55b69f0ceb90, 0x55b69f0cebe0 } -0x55b69f0ceb90->Object::~Object { 0x55b69f0ceb40, 0x55b69f0ceb90 } -0x55b69f0ceb40->Object::~Object { 0x55b69f0ceb40 } +0x7fff3597de37->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40 } +0x7fff3597dec0->Object::Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597de37 } +0x7fff3597de37->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597de37, 0x7fff3597dec0 } +Next token is token 'p' (0x7fff3597dec0 'p'Exception caught: cleaning lookahead and stack +0x556b4c9edf40->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x556b4c9edf40, 0x7fff3597dec0 } +0x556b4c9edf20->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x556b4c9edf20, 0x7fff3597dec0 } +0x556b4c9edf00->Object::~Object { 0x556b4c9edee0, 0x556b4c9edf00, 0x7fff3597dec0 } +0x556b4c9edee0->Object::~Object { 0x556b4c9edee0, 0x7fff3597dec0 } +0x7fff3597dec0->Object::~Object { 0x7fff3597dec0 } exception caught: printer end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: grep '^exception caught: printer$' stderr +./c++.at:1362: grep '^exception caught: printer$' stderr stdout: -745. glr-regression.at:1103: testing Corrupted semantic options if user action cuts parse: glr.cc ... exception caught: printer +./c++.at:1362: $PREPARSER ./input aaaae stderr: -./glr-regression.at:1103: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y -./c++.at:1360: $PREPARSER ./input aaaae -741. 
glr-regression.at:1036: ok -error: invalid expression -stderr: -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaal +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaE stderr: -./c++.at:1066: $PREPARSER ./input < in -exception caught: yylex +exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaE +./c++.at:1362: $PREPARSER ./input aaaaT +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR +stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stderr: +stdout: +stdout: +./glr-regression.at:843: $PREPARSER ./glr-regr7 +./c++.at:1066: $PREPARSER ./input < in +stderr: +memory exhausted +./glr-regression.at:843: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +error: invalid expression +caught error error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in +stderr: +735. glr-regression.at:843: ok stderr: +error: invalid expression ./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1103: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS -./c++.at:1362: $PREPARSER ./input i - +stdout: +./glr-regression.at:739: $PREPARSER ./glr-regr6 +./c++.at:1066: $PREPARSER ./input < in stderr: -exception caught: initial-action -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1360: $PREPARSER ./input aaaaT stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous ./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS -./c++.at:1362: $PREPARSER ./input aaaap +./glr-regression.at:739: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +733. glr-regression.at:739: ok + + +739. glr-regression.at:945: testing Incorrectly initialized location for empty right-hand side in GLR: glr.cc ... +./glr-regression.at:945: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR stderr: -./c++.at:1362: $PREPARSER ./input --debug aaaap +stdout: +stdout: +./glr-regression.at:671: $PREPARSER ./glr-regr5 +stderr: +./c++.at:1066: ./check +738. glr-regression.at:944: testing Incorrectly initialized location for empty right-hand side in GLR: glr.c ... 
+./glr-regression.at:944: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.c glr-regr8.y +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +./glr-regression.at:671: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +730. glr-regression.at:671: ok + +./glr-regression.at:944: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr8 glr-regr8.c $LIBS +./glr-regression.at:945: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS +740. glr-regression.at:946: testing Incorrectly initialized location for empty right-hand side in GLR: glr2.cc ... +./glr-regression.at:946: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr8.cc glr-regr8.y +stderr: +stdout: +./c++.at:1360: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaal +./glr-regression.at:946: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr8 glr-regr8.cc $LIBS +stderr: +exception caught: yylex +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaap +stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffee3f7b5d7->Object::Object { } -0x7ffee3f7b660->Object::Object { 0x7ffee3f7b5d7 } -0x7ffee3f7b5d7->Object::~Object { 0x7ffee3f7b5d7, 0x7ffee3f7b660 } -Next token is token 'a' (0x7ffee3f7b660 'a') -0x7ffee3f7b5b0->Object::Object { 0x7ffee3f7b660 } -0x7ffee3f7b660->Object::~Object { 0x7ffee3f7b5b0, 0x7ffee3f7b660 } -Shifting token 'a' (0x7ffee3f7b5b0 'a') -0x5570f1be7ee0->Object::Object { 0x7ffee3f7b5b0 } -0x7ffee3f7b5b0->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b5b0 } +0x55c0d62bab40->Object::Object { } +Next token is token 'a' (0x55c0d62bab40 'a') +Shifting token 'a' (0x55c0d62bab40 'a') Entering state 2 Stack now 0 2 -0x7ffee3f7b680->Object::Object { 0x5570f1be7ee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5570f1be7ee0 'a') --> $$ = nterm item (0x7ffee3f7b680 'a') -0x5570f1be7ee0->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b680 } -0x5570f1be7ee0->Object::Object { 0x7ffee3f7b680 } -0x7ffee3f7b680->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b680 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c0d62bab40 'a') +-> $$ = nterm item (0x55c0d62bab40 'a') Entering state 11 Stack now 0 11 Reading a token -0x7ffee3f7b5d7->Object::Object { 0x5570f1be7ee0 } -0x7ffee3f7b660->Object::Object { 0x5570f1be7ee0, 0x7ffee3f7b5d7 } -0x7ffee3f7b5d7->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b5d7, 0x7ffee3f7b660 } -Next token is token 'a' (0x7ffee3f7b660 'a') -0x7ffee3f7b5b0->Object::Object { 0x5570f1be7ee0, 0x7ffee3f7b660 } -0x7ffee3f7b660->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b5b0, 0x7ffee3f7b660 } -Shifting token 'a' (0x7ffee3f7b5b0 'a') -0x5570f1be7f00->Object::Object { 0x5570f1be7ee0, 0x7ffee3f7b5b0 } -0x7ffee3f7b5b0->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b5b0 } 
+0x55c0d62bab90->Object::Object { 0x55c0d62bab40 } +Next token is token 'a' (0x55c0d62bab90 'a') +Shifting token 'a' (0x55c0d62bab90 'a') Entering state 2 Stack now 0 11 2 -0x7ffee3f7b680->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5570f1be7f00 'a') --> $$ = nterm item (0x7ffee3f7b680 'a') -0x5570f1be7f00->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b680 } -0x5570f1be7f00->Object::Object { 0x5570f1be7ee0, 0x7ffee3f7b680 } -0x7ffee3f7b680->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b680 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c0d62bab90 'a') +-> $$ = nterm item (0x55c0d62bab90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x7ffee3f7b5d7->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00 } -0x7ffee3f7b660->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b5d7 } -0x7ffee3f7b5d7->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b5d7, 0x7ffee3f7b660 } -Next token is token 'a' (0x7ffee3f7b660 'a') -0x7ffee3f7b5b0->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b660 } -0x7ffee3f7b660->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b5b0, 0x7ffee3f7b660 } -Shifting token 'a' (0x7ffee3f7b5b0 'a') -0x5570f1be7f20->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b5b0 } -0x7ffee3f7b5b0->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b5b0 } +0x55c0d62babe0->Object::Object { 0x55c0d62bab40, 0x55c0d62bab90 } +Next token is token 'a' (0x55c0d62babe0 'a') +Shifting token 'a' (0x55c0d62babe0 'a') Entering state 2 Stack now 0 11 11 2 -0x7ffee3f7b680->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5570f1be7f20 'a') --> $$ = nterm item (0x7ffee3f7b680 'a') -0x5570f1be7f20->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b680 } -0x5570f1be7f20->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b680 } -0x7ffee3f7b680->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b680 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c0d62babe0 'a') +-> $$ = nterm item (0x55c0d62babe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffee3f7b5d7->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20 } -0x7ffee3f7b660->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b5d7 } -0x7ffee3f7b5d7->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b5d7, 0x7ffee3f7b660 } -Next token is token 'a' (0x7ffee3f7b660 'a') -0x7ffee3f7b5b0->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b660 } -0x7ffee3f7b660->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b5b0, 0x7ffee3f7b660 } -Shifting token 'a' (0x7ffee3f7b5b0 'a') -0x5570f1be7f40->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b5b0 } -0x7ffee3f7b5b0->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b5b0 } +0x55c0d62bac30->Object::Object { 0x55c0d62bab40, 0x55c0d62bab90, 0x55c0d62babe0 } +Next token is token 'a' (0x55c0d62bac30 'a') +Shifting token 'a' (0x55c0d62bac30 'a') Entering state 2 Stack now 0 11 11 11 2 -0x7ffee3f7b680->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5570f1be7f40 'a') --> $$ = nterm item (0x7ffee3f7b680 'a') 
-0x5570f1be7f40->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b680 } -0x5570f1be7f40->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b680 } -0x7ffee3f7b680->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b680 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c0d62bac30 'a') +-> $$ = nterm item (0x55c0d62bac30 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffee3f7b5d7->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40 } -0x7ffee3f7b660->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b5d7 } -0x7ffee3f7b5d7->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b5d7, 0x7ffee3f7b660 } -Next token is token 'p' (0x7ffee3f7b660 'p'Exception caught: cleaning lookahead and stack -0x5570f1be7f40->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b660 } -0x5570f1be7f20->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b660 } -0x5570f1be7f00->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b660 } -0x5570f1be7ee0->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b660 } -0x7ffee3f7b660->Object::~Object { 0x7ffee3f7b660 } +0x55c0d62bac80->Object::Object { 0x55c0d62bab40, 0x55c0d62bab90, 0x55c0d62babe0, 0x55c0d62bac30 } +Next token is token 'p' (0x55c0d62bac80 'p'Exception caught: cleaning lookahead and stack +0x55c0d62bac80->Object::~Object { 0x55c0d62bab40, 0x55c0d62bab90, 0x55c0d62babe0, 0x55c0d62bac30, 0x55c0d62bac80 } +0x55c0d62bac30->Object::~Object { 0x55c0d62bab40, 0x55c0d62bab90, 0x55c0d62babe0, 0x55c0d62bac30 } +0x55c0d62babe0->Object::~Object { 0x55c0d62bab40, 0x55c0d62bab90, 0x55c0d62babe0 } +0x55c0d62bab90->Object::~Object { 0x55c0d62bab40, 0x55c0d62bab90 } +0x55c0d62bab40->Object::~Object { 0x55c0d62bab40 } exception caught: printer end { } -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -746. glr-regression.at:1104: testing Corrupted semantic options if user action cuts parse: glr2.cc ... 
-./glr-regression.at:1104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y -======== Testing with C++ standard flags: '' -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffee3f7b5d7->Object::Object { } -0x7ffee3f7b660->Object::Object { 0x7ffee3f7b5d7 } -0x7ffee3f7b5d7->Object::~Object { 0x7ffee3f7b5d7, 0x7ffee3f7b660 } -Next token is token 'a' (0x7ffee3f7b660 'a') -0x7ffee3f7b5b0->Object::Object { 0x7ffee3f7b660 } -0x7ffee3f7b660->Object::~Object { 0x7ffee3f7b5b0, 0x7ffee3f7b660 } -Shifting token 'a' (0x7ffee3f7b5b0 'a') -0x5570f1be7ee0->Object::Object { 0x7ffee3f7b5b0 } -0x7ffee3f7b5b0->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b5b0 } +0x55c0d62bab40->Object::Object { } +Next token is token 'a' (0x55c0d62bab40 'a') +Shifting token 'a' (0x55c0d62bab40 'a') Entering state 2 Stack now 0 2 -0x7ffee3f7b680->Object::Object { 0x5570f1be7ee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5570f1be7ee0 'a') --> $$ = nterm item (0x7ffee3f7b680 'a') -0x5570f1be7ee0->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b680 } -0x5570f1be7ee0->Object::Object { 0x7ffee3f7b680 } -0x7ffee3f7b680->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b680 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c0d62bab40 'a') +-> $$ = nterm item (0x55c0d62bab40 'a') Entering state 11 Stack now 0 11 Reading a token -0x7ffee3f7b5d7->Object::Object { 0x5570f1be7ee0 } -0x7ffee3f7b660->Object::Object { 0x5570f1be7ee0, 0x7ffee3f7b5d7 } -0x7ffee3f7b5d7->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b5d7, 0x7ffee3f7b660 } -Next token is token 'a' (0x7ffee3f7b660 'a') -0x7ffee3f7b5b0->Object::Object { 0x5570f1be7ee0, 0x7ffee3f7b660 } -0x7ffee3f7b660->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b5b0, 0x7ffee3f7b660 } -Shifting token 'a' (0x7ffee3f7b5b0 'a') -0x5570f1be7f00->Object::Object { 0x5570f1be7ee0, 0x7ffee3f7b5b0 } -0x7ffee3f7b5b0->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b5b0 } +0x55c0d62bab90->Object::Object { 0x55c0d62bab40 } +Next token is token 'a' (0x55c0d62bab90 'a') +Shifting token 'a' (0x55c0d62bab90 'a') Entering state 2 Stack now 0 11 2 -0x7ffee3f7b680->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5570f1be7f00 'a') --> $$ = nterm item (0x7ffee3f7b680 'a') -0x5570f1be7f00->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b680 } -0x5570f1be7f00->Object::Object { 0x5570f1be7ee0, 0x7ffee3f7b680 } -0x7ffee3f7b680->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b680 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c0d62bab90 'a') +-> $$ = nterm item (0x55c0d62bab90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x7ffee3f7b5d7->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00 } -0x7ffee3f7b660->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b5d7 } -0x7ffee3f7b5d7->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b5d7, 0x7ffee3f7b660 } -Next token is token 'a' (0x7ffee3f7b660 'a') -0x7ffee3f7b5b0->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b660 } -0x7ffee3f7b660->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b5b0, 0x7ffee3f7b660 } -Shifting token 'a' (0x7ffee3f7b5b0 'a') -0x5570f1be7f20->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b5b0 } 
-0x7ffee3f7b5b0->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b5b0 } +0x55c0d62babe0->Object::Object { 0x55c0d62bab40, 0x55c0d62bab90 } +Next token is token 'a' (0x55c0d62babe0 'a') +Shifting token 'a' (0x55c0d62babe0 'a') Entering state 2 Stack now 0 11 11 2 -0x7ffee3f7b680->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5570f1be7f20 'a') --> $$ = nterm item (0x7ffee3f7b680 'a') -0x5570f1be7f20->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b680 } -0x5570f1be7f20->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b680 } -0x7ffee3f7b680->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b680 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c0d62babe0 'a') +-> $$ = nterm item (0x55c0d62babe0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffee3f7b5d7->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20 } -0x7ffee3f7b660->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b5d7 } -0x7ffee3f7b5d7->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b5d7, 0x7ffee3f7b660 } -Next token is token 'a' (0x7ffee3f7b660 'a') -0x7ffee3f7b5b0->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b660 } -0x7ffee3f7b660->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b5b0, 0x7ffee3f7b660 } -Shifting token 'a' (0x7ffee3f7b5b0 'a') -0x5570f1be7f40->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b5b0 } -0x7ffee3f7b5b0->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b5b0 } +0x55c0d62bac30->Object::Object { 0x55c0d62bab40, 0x55c0d62bab90, 0x55c0d62babe0 } +Next token is token 'a' (0x55c0d62bac30 'a') +Shifting token 'a' (0x55c0d62bac30 'a') Entering state 2 Stack now 0 11 11 11 2 -0x7ffee3f7b680->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5570f1be7f40 'a') --> $$ = nterm item (0x7ffee3f7b680 'a') -0x5570f1be7f40->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b680 } -0x5570f1be7f40->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b680 } -0x7ffee3f7b680->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b680 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55c0d62bac30 'a') +-> $$ = nterm item (0x55c0d62bac30 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffee3f7b5d7->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40 } -0x7ffee3f7b660->Object::Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b5d7 } -0x7ffee3f7b5d7->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b5d7, 0x7ffee3f7b660 } -Next token is token 'p' (0x7ffee3f7b660 'p'Exception caught: cleaning lookahead and stack -0x5570f1be7f40->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x5570f1be7f40, 0x7ffee3f7b660 } -0x5570f1be7f20->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x5570f1be7f20, 0x7ffee3f7b660 } -0x5570f1be7f00->Object::~Object { 0x5570f1be7ee0, 0x5570f1be7f00, 0x7ffee3f7b660 } -0x5570f1be7ee0->Object::~Object { 0x5570f1be7ee0, 0x7ffee3f7b660 } -0x7ffee3f7b660->Object::~Object { 0x7ffee3f7b660 } 
+0x55c0d62bac80->Object::Object { 0x55c0d62bab40, 0x55c0d62bab90, 0x55c0d62babe0, 0x55c0d62bac30 } +Next token is token 'p' (0x55c0d62bac80 'p'Exception caught: cleaning lookahead and stack +0x55c0d62bac80->Object::~Object { 0x55c0d62bab40, 0x55c0d62bab90, 0x55c0d62babe0, 0x55c0d62bac30, 0x55c0d62bac80 } +0x55c0d62bac30->Object::~Object { 0x55c0d62bab40, 0x55c0d62bab90, 0x55c0d62babe0, 0x55c0d62bac30 } +0x55c0d62babe0->Object::~Object { 0x55c0d62bab40, 0x55c0d62bab90, 0x55c0d62babe0 } +0x55c0d62bab90->Object::~Object { 0x55c0d62bab40, 0x55c0d62bab90 } +0x55c0d62bab40->Object::~Object { 0x55c0d62bab40 } exception caught: printer end { } -./c++.at:1362: grep '^exception caught: printer$' stderr +./c++.at:1360: grep '^exception caught: printer$' stderr stdout: exception caught: printer -./c++.at:1362: $PREPARSER ./input aaaae +./c++.at:1360: $PREPARSER ./input aaaae stderr: exception caught: syntax error -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaT -./glr-regression.at:1104: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS -stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaT stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stdout: -./glr-regression.at:945: $PREPARSER ./glr-regr8 +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaR stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:1362: $PREPARSER ./input aaaaR -./c++.at:1066: ./check +./c++.at:857: $PREPARSER ./input stderr: -./glr-regression.at:945: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -739. glr-regression.at:945: ok -./glr-regression.at:672: $PREPARSER ./glr-regr5 +./glr-regression.at:944: $PREPARSER ./glr-regr8 stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:672: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -731. glr-regression.at:672: ok - - -747. glr-regression.at:1174: testing Undesirable destructors if user action cuts parse: glr.c ... -./glr-regression.at:1174: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.c glr-regr11.y -748. glr-regression.at:1175: testing Undesirable destructors if user action cuts parse: glr.cc ... 
-./glr-regression.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y +./glr-regression.at:944: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +738. glr-regression.at:944: ok stderr: stdout: -./glr-regression.at:1037: $PREPARSER ./glr-regr9 +./glr-regression.at:844: $PREPARSER ./glr-regr7 stderr: memory exhausted -./glr-regression.at:1037: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -742. glr-regression.at:1037: ok -./glr-regression.at:1175: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS -./glr-regression.at:1174: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr11 glr-regr11.c $LIBS +./glr-regression.at:844: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +736. glr-regression.at:844: ok stderr: stdout: -./glr-regression.at:1102: $PREPARSER ./glr-regr10 -stderr: -./glr-regression.at:1102: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -744. glr-regression.at:1102: ok -749. glr-regression.at:1176: testing Undesirable destructors if user action cuts parse: glr2.cc ... -./glr-regression.at:1176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y +./c++.at:1363: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +741. glr-regression.at:1036: testing No users destructors if stack 0 deleted: glr.c ... +./glr-regression.at:1036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.c glr-regr9.y stderr: stdout: -./glr-regression.at:740: $PREPARSER ./glr-regr6 +./c++.at:1363: $PREPARSER ./input i +./glr-regression.at:594: $PREPARSER ./glr-regr4 stderr: -Ambiguity detected. -Option 1, - start -> - 'a' - -Option 2, - start -> - 'a' - -syntax is ambiguous -./glr-regression.at:740: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -734. glr-regression.at:740: ok -./glr-regression.at:1176: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS +stderr: +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:594: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaap +725. glr-regression.at:594: ok +stderr: +742. glr-regression.at:1037: testing No users destructors if stack 0 deleted: glr.cc ... 
+./glr-regression.at:1037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap +./glr-regression.at:1036: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr9 glr-regr9.c $LIBS +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffd0851844f->Object::Object { } +0x7ffd08518520->Object::Object { 0x7ffd0851844f } +0x7ffd0851844f->Object::~Object { 0x7ffd0851844f, 0x7ffd08518520 } +Next token is token 'a' (0x7ffd08518520 'a') +0x7ffd08518470->Object::Object { 0x7ffd08518520 } +0x7ffd08518520->Object::~Object { 0x7ffd08518470, 0x7ffd08518520 } +Shifting token 'a' (0x7ffd08518470 'a') +0x55b532b42ee0->Object::Object { 0x7ffd08518470 } +0x7ffd08518470->Object::~Object { 0x55b532b42ee0, 0x7ffd08518470 } +Entering state 1 +Stack now 0 1 +0x7ffd08518540->Object::Object { 0x55b532b42ee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b532b42ee0 'a') +-> $$ = nterm item (0x7ffd08518540 'a') +0x55b532b42ee0->Object::~Object { 0x55b532b42ee0, 0x7ffd08518540 } +0x55b532b42ee0->Object::Object { 0x7ffd08518540 } +0x7ffd08518540->Object::~Object { 0x55b532b42ee0, 0x7ffd08518540 } +Entering state 10 +Stack now 0 10 +Reading a token +0x7ffd0851844f->Object::Object { 0x55b532b42ee0 } +0x7ffd08518520->Object::Object { 0x55b532b42ee0, 0x7ffd0851844f } +0x7ffd0851844f->Object::~Object { 0x55b532b42ee0, 0x7ffd0851844f, 0x7ffd08518520 } +Next token is token 'a' (0x7ffd08518520 'a') +0x7ffd08518470->Object::Object { 0x55b532b42ee0, 0x7ffd08518520 } +0x7ffd08518520->Object::~Object { 0x55b532b42ee0, 0x7ffd08518470, 0x7ffd08518520 } +Shifting token 'a' (0x7ffd08518470 'a') +0x55b532b42f00->Object::Object { 0x55b532b42ee0, 0x7ffd08518470 } +0x7ffd08518470->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518470 } +Entering state 1 +Stack now 0 10 1 +0x7ffd08518540->Object::Object { 0x55b532b42ee0, 0x55b532b42f00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b532b42f00 'a') +-> $$ = nterm item (0x7ffd08518540 'a') +0x55b532b42f00->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518540 } +0x55b532b42f00->Object::Object { 0x55b532b42ee0, 0x7ffd08518540 } +0x7ffd08518540->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518540 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0x7ffd0851844f->Object::Object { 0x55b532b42ee0, 0x55b532b42f00 } +0x7ffd08518520->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd0851844f } +0x7ffd0851844f->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd0851844f, 0x7ffd08518520 } +Next token is token 'a' (0x7ffd08518520 'a') +0x7ffd08518470->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518520 } +0x7ffd08518520->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518470, 0x7ffd08518520 } +Shifting token 'a' (0x7ffd08518470 'a') +0x55b532b42f20->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518470 } +0x7ffd08518470->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518470 } +Entering state 1 +Stack now 0 10 10 1 +0x7ffd08518540->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b532b42f20 'a') +-> $$ = nterm item (0x7ffd08518540 'a') +0x55b532b42f20->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518540 } 
+0x55b532b42f20->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518540 } +0x7ffd08518540->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518540 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token +0x7ffd0851844f->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20 } +0x7ffd08518520->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd0851844f } +0x7ffd0851844f->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd0851844f, 0x7ffd08518520 } +Next token is token 'a' (0x7ffd08518520 'a') +0x7ffd08518470->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518520 } +0x7ffd08518520->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518470, 0x7ffd08518520 } +Shifting token 'a' (0x7ffd08518470 'a') +0x55b532b42f40->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518470 } +0x7ffd08518470->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd08518470 } +Entering state 1 +Stack now 0 10 10 10 1 +0x7ffd08518540->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b532b42f40 'a') +-> $$ = nterm item (0x7ffd08518540 'a') +0x55b532b42f40->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd08518540 } +0x55b532b42f40->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518540 } +0x7ffd08518540->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd08518540 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x7ffd0851844f->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40 } +0x7ffd08518520->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd0851844f } +0x7ffd0851844f->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd0851844f, 0x7ffd08518520 } +Next token is token 'p' (0x7ffd08518520 'p'Exception caught: cleaning lookahead and stack +0x55b532b42f40->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd08518520 } +0x55b532b42f20->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518520 } +0x55b532b42f00->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518520 } +0x55b532b42ee0->Object::~Object { 0x55b532b42ee0, 0x7ffd08518520 } +0x7ffd08518520->Object::~Object { 0x7ffd08518520 } +exception caught: printer +end { } +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -750. glr-regression.at:1310: testing Leaked semantic values if user action cuts parse: glr.c ... 
-./glr-regression.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.c glr-regr12.y +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x7ffd0851844f->Object::Object { } +0x7ffd08518520->Object::Object { 0x7ffd0851844f } +0x7ffd0851844f->Object::~Object { 0x7ffd0851844f, 0x7ffd08518520 } +Next token is token 'a' (0x7ffd08518520 'a') +0x7ffd08518470->Object::Object { 0x7ffd08518520 } +0x7ffd08518520->Object::~Object { 0x7ffd08518470, 0x7ffd08518520 } +Shifting token 'a' (0x7ffd08518470 'a') +0x55b532b42ee0->Object::Object { 0x7ffd08518470 } +0x7ffd08518470->Object::~Object { 0x55b532b42ee0, 0x7ffd08518470 } +Entering state 1 +Stack now 0 1 +0x7ffd08518540->Object::Object { 0x55b532b42ee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b532b42ee0 'a') +-> $$ = nterm item (0x7ffd08518540 'a') +0x55b532b42ee0->Object::~Object { 0x55b532b42ee0, 0x7ffd08518540 } +0x55b532b42ee0->Object::Object { 0x7ffd08518540 } +0x7ffd08518540->Object::~Object { 0x55b532b42ee0, 0x7ffd08518540 } +Entering state 10 +Stack now 0 10 +Reading a token +0x7ffd0851844f->Object::Object { 0x55b532b42ee0 } +0x7ffd08518520->Object::Object { 0x55b532b42ee0, 0x7ffd0851844f } +0x7ffd0851844f->Object::~Object { 0x55b532b42ee0, 0x7ffd0851844f, 0x7ffd08518520 } +Next token is token 'a' (0x7ffd08518520 'a') +0x7ffd08518470->Object::Object { 0x55b532b42ee0, 0x7ffd08518520 } +0x7ffd08518520->Object::~Object { 0x55b532b42ee0, 0x7ffd08518470, 0x7ffd08518520 } +Shifting token 'a' (0x7ffd08518470 'a') +0x55b532b42f00->Object::Object { 0x55b532b42ee0, 0x7ffd08518470 } +0x7ffd08518470->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518470 } +Entering state 1 +Stack now 0 10 1 +0x7ffd08518540->Object::Object { 0x55b532b42ee0, 0x55b532b42f00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b532b42f00 'a') +-> $$ = nterm item (0x7ffd08518540 'a') +0x55b532b42f00->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518540 } +0x55b532b42f00->Object::Object { 0x55b532b42ee0, 0x7ffd08518540 } +0x7ffd08518540->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518540 } +Entering state 10 +Stack now 0 10 10 +Reading a token +0x7ffd0851844f->Object::Object { 0x55b532b42ee0, 0x55b532b42f00 } +0x7ffd08518520->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd0851844f } +0x7ffd0851844f->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd0851844f, 0x7ffd08518520 } +Next token is token 'a' (0x7ffd08518520 'a') +0x7ffd08518470->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518520 } +0x7ffd08518520->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518470, 0x7ffd08518520 } +Shifting token 'a' (0x7ffd08518470 'a') +0x55b532b42f20->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518470 } +0x7ffd08518470->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518470 } +Entering state 1 +Stack now 0 10 10 1 +0x7ffd08518540->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b532b42f20 'a') +-> $$ = nterm item (0x7ffd08518540 'a') +0x55b532b42f20->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518540 } +0x55b532b42f20->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518540 } +0x7ffd08518540->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518540 } +Entering state 10 +Stack now 0 10 10 10 +Reading a token 
+0x7ffd0851844f->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20 } +0x7ffd08518520->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd0851844f } +0x7ffd0851844f->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd0851844f, 0x7ffd08518520 } +Next token is token 'a' (0x7ffd08518520 'a') +0x7ffd08518470->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518520 } +0x7ffd08518520->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518470, 0x7ffd08518520 } +Shifting token 'a' (0x7ffd08518470 'a') +0x55b532b42f40->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518470 } +0x7ffd08518470->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd08518470 } +Entering state 1 +Stack now 0 10 10 10 1 +0x7ffd08518540->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b532b42f40 'a') +-> $$ = nterm item (0x7ffd08518540 'a') +0x55b532b42f40->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd08518540 } +0x55b532b42f40->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518540 } +0x7ffd08518540->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd08518540 } +Entering state 10 +Stack now 0 10 10 10 10 +Reading a token +0x7ffd0851844f->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40 } +0x7ffd08518520->Object::Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd0851844f } +0x7ffd0851844f->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd0851844f, 0x7ffd08518520 } +Next token is token 'p' (0x7ffd08518520 'p'Exception caught: cleaning lookahead and stack +0x55b532b42f40->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x55b532b42f40, 0x7ffd08518520 } +0x55b532b42f20->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x55b532b42f20, 0x7ffd08518520 } +0x55b532b42f00->Object::~Object { 0x55b532b42ee0, 0x55b532b42f00, 0x7ffd08518520 } +0x55b532b42ee0->Object::~Object { 0x55b532b42ee0, 0x7ffd08518520 } +0x7ffd08518520->Object::~Object { 0x7ffd08518520 } +exception caught: printer +end { } +./c++.at:1363: grep '^exception caught: printer$' stderr stdout: -./c++.at:858: $PREPARSER ./input +exception caught: printer +./c++.at:1363: $PREPARSER ./input aaaae stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -751. glr-regression.at:1311: testing Leaked semantic values if user action cuts parse: glr.cc ... 
-./glr-regression.at:1311: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y -./glr-regression.at:1310: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr12 glr-regr12.c $LIBS -./glr-regression.at:1311: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS +exception caught: syntax error +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stdout: +./c++.at:1363: $PREPARSER ./input aaaaE +./c++.at:1361: $PREPARSER ./input aaaas stderr: -./glr-regression.at:845: $PREPARSER ./glr-regr7 -stdout: +./glr-regression.at:1037: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS stderr: -./c++.at:1361: $PREPARSER ./input aaaas -memory exhausted -./glr-regression.at:845: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -737. glr-regression.at:845: stderr: - ok exception caught: reduction ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaal stderr: +./c++.at:1363: $PREPARSER ./input aaaaT +743. glr-regression.at:1038: testing No users destructors if stack 0 deleted: glr2.cc ... +./glr-regression.at:1038: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr9.cc glr-regr9.y exception caught: yylex ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input i stderr: exception caught: initial-action ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaR +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaap +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: -stderr: -stdout: +./glr-regression.at:1038: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr9 glr-regr9.cc $LIBS Starting parse Entering state 0 Stack now 0 Reading a token -0x556b6cb1ab40->Object::Object { } -Next token is token 'a' (0x556b6cb1ab40 'a') -Shifting token 'a' (0x556b6cb1ab40 'a') +0x55940e0b9b40->Object::Object { } +Next token is token 'a' (0x55940e0b9b40 'a') +Shifting token 'a' (0x55940e0b9b40 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556b6cb1ab40 'a') --> $$ = nterm item (0x556b6cb1ab40 'a') + $1 = token 'a' (0x55940e0b9b40 'a') +-> $$ = nterm item (0x55940e0b9b40 'a') Entering state 10 Stack now 0 10 Reading a token -0x556b6cb1ab90->Object::Object { 0x556b6cb1ab40 } -Next token is token 'a' (0x556b6cb1ab90 'a') -Shifting token 'a' (0x556b6cb1ab90 'a') +0x55940e0b9b90->Object::Object { 0x55940e0b9b40 } +Next token is token 'a' (0x55940e0b9b90 'a') +Shifting token 'a' (0x55940e0b9b90 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556b6cb1ab90 'a') --> $$ = nterm item (0x556b6cb1ab90 'a') + $1 = token 'a' (0x55940e0b9b90 'a') +-> $$ = nterm item (0x55940e0b9b90 'a') Entering 
state 10 Stack now 0 10 10 Reading a token -0x556b6cb1abe0->Object::Object { 0x556b6cb1ab40, 0x556b6cb1ab90 } -Next token is token 'a' (0x556b6cb1abe0 'a') -Shifting token 'a' (0x556b6cb1abe0 'a') +0x55940e0b9be0->Object::Object { 0x55940e0b9b40, 0x55940e0b9b90 } +Next token is token 'a' (0x55940e0b9be0 'a') +Shifting token 'a' (0x55940e0b9be0 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556b6cb1abe0 'a') --> $$ = nterm item (0x556b6cb1abe0 'a') + $1 = token 'a' (0x55940e0b9be0 'a') +-> $$ = nterm item (0x55940e0b9be0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x556b6cb1ac30->Object::Object { 0x556b6cb1ab40, 0x556b6cb1ab90, 0x556b6cb1abe0 } -Next token is token 'a' (0x556b6cb1ac30 'a') -Shifting token 'a' (0x556b6cb1ac30 'a') +0x55940e0b9c30->Object::Object { 0x55940e0b9b40, 0x55940e0b9b90, 0x55940e0b9be0 } +Next token is token 'a' (0x55940e0b9c30 'a') +Shifting token 'a' (0x55940e0b9c30 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556b6cb1ac30 'a') --> $$ = nterm item (0x556b6cb1ac30 'a') + $1 = token 'a' (0x55940e0b9c30 'a') +-> $$ = nterm item (0x55940e0b9c30 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x556b6cb1ac80->Object::Object { 0x556b6cb1ab40, 0x556b6cb1ab90, 0x556b6cb1abe0, 0x556b6cb1ac30 } -Next token is token 'p' (0x556b6cb1ac80 'p'Exception caught: cleaning lookahead and stack -0x556b6cb1ac80->Object::~Object { 0x556b6cb1ab40, 0x556b6cb1ab90, 0x556b6cb1abe0, 0x556b6cb1ac30, 0x556b6cb1ac80 } -0x556b6cb1ac30->Object::~Object { 0x556b6cb1ab40, 0x556b6cb1ab90, 0x556b6cb1abe0, 0x556b6cb1ac30 } -0x556b6cb1abe0->Object::~Object { 0x556b6cb1ab40, 0x556b6cb1ab90, 0x556b6cb1abe0 } -0x556b6cb1ab90->Object::~Object { 0x556b6cb1ab40, 0x556b6cb1ab90 } -0x556b6cb1ab40->Object::~Object { 0x556b6cb1ab40 } +0x55940e0b9c80->Object::Object { 0x55940e0b9b40, 0x55940e0b9b90, 0x55940e0b9be0, 0x55940e0b9c30 } +Next token is token 'p' (0x55940e0b9c80 'p'Exception caught: cleaning lookahead and stack +0x55940e0b9c80->Object::~Object { 0x55940e0b9b40, 0x55940e0b9b90, 0x55940e0b9be0, 0x55940e0b9c30, 0x55940e0b9c80 } +0x55940e0b9c30->Object::~Object { 0x55940e0b9b40, 0x55940e0b9b90, 0x55940e0b9be0, 0x55940e0b9c30 } +0x55940e0b9be0->Object::~Object { 0x55940e0b9b40, 0x55940e0b9b90, 0x55940e0b9be0 } +0x55940e0b9b90->Object::~Object { 0x55940e0b9b40, 0x55940e0b9b90 } +0x55940e0b9b40->Object::~Object { 0x55940e0b9b40 } exception caught: printer end { } ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -752. glr-regression.at:1312: testing Leaked semantic values if user action cuts parse: glr2.cc ... 
-./glr-regression.at:1312: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y -./glr-regression.at:1174: $PREPARSER ./glr-regr11 stderr: stderr: -stderr: -stdout: -./glr-regression.at:1174: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr Starting parse Entering state 0 Stack now 0 Reading a token -0x556b6cb1ab40->Object::Object { } -Next token is token 'a' (0x556b6cb1ab40 'a') -Shifting token 'a' (0x556b6cb1ab40 'a') +0x55940e0b9b40->Object::Object { } +Next token is token 'a' (0x55940e0b9b40 'a') +Shifting token 'a' (0x55940e0b9b40 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556b6cb1ab40 'a') --> $$ = nterm item (0x556b6cb1ab40 'a') + $1 = token 'a' (0x55940e0b9b40 'a') +-> $$ = nterm item (0x55940e0b9b40 'a') Entering state 10 Stack now 0 10 Reading a token -0x556b6cb1ab90->Object::Object { 0x556b6cb1ab40 } -Next token is token 'a' (0x556b6cb1ab90 'a') -Shifting token 'a' (0x556b6cb1ab90 'a') +0x55940e0b9b90->Object::Object { 0x55940e0b9b40 } +Next token is token 'a' (0x55940e0b9b90 'a') +Shifting token 'a' (0x55940e0b9b90 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556b6cb1ab90 'a') --> $$ = nterm item (0x556b6cb1ab90 'a') + $1 = token 'a' (0x55940e0b9b90 'a') +-> $$ = nterm item (0x55940e0b9b90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x556b6cb1abe0->Object::Object { 0x556b6cb1ab40, 0x556b6cb1ab90 } -Next token is token 'a' (0x556b6cb1abe0 'a') -Shifting token 'a' (0x556b6cb1abe0 'a') +0x55940e0b9be0->Object::Object { 0x55940e0b9b40, 0x55940e0b9b90 } +Next token is token 'a' (0x55940e0b9be0 'a') +Shifting token 'a' (0x55940e0b9be0 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556b6cb1abe0 'a') --> $$ = nterm item (0x556b6cb1abe0 'a') + $1 = token 'a' (0x55940e0b9be0 'a') +-> $$ = nterm item (0x55940e0b9be0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x556b6cb1ac30->Object::Object { 0x556b6cb1ab40, 0x556b6cb1ab90, 0x556b6cb1abe0 } -Next token is token 'a' (0x556b6cb1ac30 'a') -Shifting token 'a' (0x556b6cb1ac30 'a') +0x55940e0b9c30->Object::Object { 0x55940e0b9b40, 0x55940e0b9b90, 0x55940e0b9be0 } +Next token is token 'a' (0x55940e0b9c30 'a') +Shifting token 'a' (0x55940e0b9c30 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x556b6cb1ac30 'a') --> $$ = nterm item (0x556b6cb1ac30 'a') + $1 = token 'a' (0x55940e0b9c30 'a') +-> $$ = nterm item (0x55940e0b9c30 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x556b6cb1ac80->Object::Object { 0x556b6cb1ab40, 0x556b6cb1ab90, 0x556b6cb1abe0, 0x556b6cb1ac30 } -Next token is token 'p' (0x556b6cb1ac80 'p'Exception caught: cleaning lookahead and stack -0x556b6cb1ac80->Object::~Object { 0x556b6cb1ab40, 0x556b6cb1ab90, 0x556b6cb1abe0, 0x556b6cb1ac30, 0x556b6cb1ac80 } -0x556b6cb1ac30->Object::~Object { 0x556b6cb1ab40, 0x556b6cb1ab90, 0x556b6cb1abe0, 0x556b6cb1ac30 } -0x556b6cb1abe0->Object::~Object { 0x556b6cb1ab40, 0x556b6cb1ab90, 0x556b6cb1abe0 } -0x556b6cb1ab90->Object::~Object { 0x556b6cb1ab40, 0x556b6cb1ab90 } -0x556b6cb1ab40->Object::~Object { 0x556b6cb1ab40 } +0x55940e0b9c80->Object::Object { 0x55940e0b9b40, 0x55940e0b9b90, 0x55940e0b9be0, 0x55940e0b9c30 } +Next token is token 'p' (0x55940e0b9c80 'p'Exception caught: cleaning lookahead and stack +0x55940e0b9c80->Object::~Object { 
0x55940e0b9b40, 0x55940e0b9b90, 0x55940e0b9be0, 0x55940e0b9c30, 0x55940e0b9c80 } +0x55940e0b9c30->Object::~Object { 0x55940e0b9b40, 0x55940e0b9b90, 0x55940e0b9be0, 0x55940e0b9c30 } +0x55940e0b9be0->Object::~Object { 0x55940e0b9b40, 0x55940e0b9b90, 0x55940e0b9be0 } +0x55940e0b9b90->Object::~Object { 0x55940e0b9b40, 0x55940e0b9b90 } +0x55940e0b9b40->Object::~Object { 0x55940e0b9b40 } exception caught: printer end { } ./c++.at:1361: grep '^exception caught: printer$' stderr -./glr-regression.at:1103: $PREPARSER ./glr-regr10 stdout: -stderr: +stdout: +./glr-regression.at:672: $PREPARSER ./glr-regr5 exception caught: printer ./c++.at:1361: $PREPARSER ./input aaaae -./glr-regression.at:1103: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -747. glr-regression.at:1174: ok stderr: +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' + +syntax is ambiguous +stderr: +./glr-regression.at:672: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: syntax error ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -745. glr-regression.at:1103: ok +731. glr-regression.at:672: ok ./c++.at:1361: $PREPARSER ./input aaaaE - stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1312: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS ./c++.at:1361: $PREPARSER ./input aaaaT stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -268792,50 +268684,75 @@ stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' -753. glr-regression.at:1445: testing Incorrect lookahead during deterministic GLR: glr.c ... -./glr-regression.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.c glr-regr13.y ./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -754. glr-regression.at:1446: testing Incorrect lookahead during deterministic GLR: glr.cc ... -./glr-regression.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y -./glr-regression.at:1445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr13 glr-regr13.c $LIBS -./glr-regression.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS +744. glr-regression.at:1102: testing Corrupted semantic options if user action cuts parse: glr.c ... +./glr-regression.at:1102: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.c glr-regr10.y stderr: stdout: -./glr-regression.at:1310: $PREPARSER ./glr-regr12 +./glr-regression.at:945: $PREPARSER ./glr-regr8 stderr: -./glr-regression.at:1310: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -750. glr-regression.at:1310: ok +./glr-regression.at:945: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +739. glr-regression.at:945: ok +./glr-regression.at:1102: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr10 glr-regr10.c $LIBS + stderr: stdout: -./glr-regression.at:1175: $PREPARSER ./glr-regr11 +./glr-regression.at:598: $PREPARSER ./glr-regr4 +stderr: +./glr-regression.at:598: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +745. 
glr-regression.at:1103: testing Corrupted semantic options if user action cuts parse: glr.cc ... +./glr-regression.at:1103: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y +728. glr-regression.at:598: ok + +./glr-regression.at:1103: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS stderr: +stdout: +./c++.at:857: $PREPARSER ./input stderr: -./glr-regression.at:1175: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +746. glr-regression.at:1104: testing Corrupted semantic options if user action cuts parse: glr2.cc ... +======== Testing with C++ standard flags: '' +./glr-regression.at:1104: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr10.cc glr-regr10.y +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:1104: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr10 glr-regr10.cc $LIBS +stderr: +stdout: +./glr-regression.at:740: $PREPARSER ./glr-regr6 +stderr: +Ambiguity detected. +Option 1, + start -> + 'a' + +Option 2, + start -> + 'a' +syntax is ambiguous +./glr-regression.at:740: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: stdout: -./glr-regression.at:946: $PREPARSER ./glr-regr8 +./glr-regression.at:1036: $PREPARSER ./glr-regr9 +734. glr-regression.at:740: ok stderr: -748. glr-regression.at:1175: ok -./glr-regression.at:946: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -755. glr-regression.at:1447: testing Incorrect lookahead during deterministic GLR: glr2.cc ... -740. glr-regression.at:946: ok -./glr-regression.at:1447: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y +memory exhausted +./glr-regression.at:1036: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +741. glr-regression.at:1036: ok -./glr-regression.at:1447: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS +747. glr-regression.at:1174: testing Undesirable destructors if user action cuts parse: glr.c ... +./glr-regression.at:1174: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.c glr-regr11.y +748. glr-regression.at:1175: testing Undesirable destructors if user action cuts parse: glr.cc ... +./glr-regression.at:1175: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y +./glr-regression.at:1174: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr11 glr-regr11.c $LIBS +./glr-regression.at:1175: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS stderr: stdout: -./c++.at:858: $PREPARSER ./input +./glr-regression.at:1102: $PREPARSER ./glr-regr10 stderr: -756. glr-regression.at:1678: testing Incorrect lookahead during nondeterministic GLR: glr.c ... -./glr-regression.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.c glr-regr14.y -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -757. 
glr-regression.at:1679: testing Incorrect lookahead during nondeterministic GLR: glr.cc ... -./glr-regression.at:1679: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y -./glr-regression.at:1678: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr14 glr-regr14.c $LIBS -./glr-regression.at:1679: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS +./glr-regression.at:1102: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +744. glr-regression.at:1102: ok + stderr: stdout: ./c++.at:1362: $PREPARSER ./input aaaas @@ -268850,8 +268767,10 @@ stderr: exception caught: initial-action ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +749. glr-regression.at:1176: testing Undesirable destructors if user action cuts parse: glr2.cc ... ./c++.at:1362: $PREPARSER ./input aaaap stderr: +./glr-regression.at:1176: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr11.cc glr-regr11.y ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: @@ -268859,99 +268778,99 @@ Entering state 0 Stack now 0 Reading a token -0x7ffe8a6c8db7->Object::Object { } -0x7ffe8a6c8e40->Object::Object { 0x7ffe8a6c8db7 } -0x7ffe8a6c8db7->Object::~Object { 0x7ffe8a6c8db7, 0x7ffe8a6c8e40 } -Next token is token 'a' (0x7ffe8a6c8e40 'a') -0x7ffe8a6c8d90->Object::Object { 0x7ffe8a6c8e40 } -0x7ffe8a6c8e40->Object::~Object { 0x7ffe8a6c8d90, 0x7ffe8a6c8e40 } -Shifting token 'a' (0x7ffe8a6c8d90 'a') -0x5595c6ac4ee0->Object::Object { 0x7ffe8a6c8d90 } -0x7ffe8a6c8d90->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8d90 } +0x7fff02cea6f7->Object::Object { } +0x7fff02cea780->Object::Object { 0x7fff02cea6f7 } +0x7fff02cea6f7->Object::~Object { 0x7fff02cea6f7, 0x7fff02cea780 } +Next token is token 'a' (0x7fff02cea780 'a') +0x7fff02cea6d0->Object::Object { 0x7fff02cea780 } +0x7fff02cea780->Object::~Object { 0x7fff02cea6d0, 0x7fff02cea780 } +Shifting token 'a' (0x7fff02cea6d0 'a') +0x55cbf768dee0->Object::Object { 0x7fff02cea6d0 } +0x7fff02cea6d0->Object::~Object { 0x55cbf768dee0, 0x7fff02cea6d0 } Entering state 2 Stack now 0 2 -0x7ffe8a6c8e60->Object::Object { 0x5595c6ac4ee0 } +0x7fff02cea7a0->Object::Object { 0x55cbf768dee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5595c6ac4ee0 'a') --> $$ = nterm item (0x7ffe8a6c8e60 'a') -0x5595c6ac4ee0->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8e60 } -0x5595c6ac4ee0->Object::Object { 0x7ffe8a6c8e60 } -0x7ffe8a6c8e60->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8e60 } + $1 = token 'a' (0x55cbf768dee0 'a') +-> $$ = nterm item (0x7fff02cea7a0 'a') +0x55cbf768dee0->Object::~Object { 0x55cbf768dee0, 0x7fff02cea7a0 } +0x55cbf768dee0->Object::Object { 0x7fff02cea7a0 } +0x7fff02cea7a0->Object::~Object { 0x55cbf768dee0, 0x7fff02cea7a0 } Entering state 11 Stack now 0 11 Reading a token -0x7ffe8a6c8db7->Object::Object { 0x5595c6ac4ee0 } -0x7ffe8a6c8e40->Object::Object { 0x5595c6ac4ee0, 0x7ffe8a6c8db7 } -0x7ffe8a6c8db7->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8db7, 0x7ffe8a6c8e40 } -Next token is token 'a' (0x7ffe8a6c8e40 'a') -0x7ffe8a6c8d90->Object::Object { 0x5595c6ac4ee0, 0x7ffe8a6c8e40 } -0x7ffe8a6c8e40->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8d90, 0x7ffe8a6c8e40 } -Shifting token 'a' (0x7ffe8a6c8d90 'a') -0x5595c6ac4f00->Object::Object { 0x5595c6ac4ee0, 0x7ffe8a6c8d90 } 
-0x7ffe8a6c8d90->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8d90 } +0x7fff02cea6f7->Object::Object { 0x55cbf768dee0 } +0x7fff02cea780->Object::Object { 0x55cbf768dee0, 0x7fff02cea6f7 } +0x7fff02cea6f7->Object::~Object { 0x55cbf768dee0, 0x7fff02cea6f7, 0x7fff02cea780 } +Next token is token 'a' (0x7fff02cea780 'a') +0x7fff02cea6d0->Object::Object { 0x55cbf768dee0, 0x7fff02cea780 } +0x7fff02cea780->Object::~Object { 0x55cbf768dee0, 0x7fff02cea6d0, 0x7fff02cea780 } +Shifting token 'a' (0x7fff02cea6d0 'a') +0x55cbf768df00->Object::Object { 0x55cbf768dee0, 0x7fff02cea6d0 } +0x7fff02cea6d0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea6d0 } Entering state 2 Stack now 0 11 2 -0x7ffe8a6c8e60->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00 } +0x7fff02cea7a0->Object::Object { 0x55cbf768dee0, 0x55cbf768df00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5595c6ac4f00 'a') --> $$ = nterm item (0x7ffe8a6c8e60 'a') -0x5595c6ac4f00->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8e60 } -0x5595c6ac4f00->Object::Object { 0x5595c6ac4ee0, 0x7ffe8a6c8e60 } -0x7ffe8a6c8e60->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8e60 } + $1 = token 'a' (0x55cbf768df00 'a') +-> $$ = nterm item (0x7fff02cea7a0 'a') +0x55cbf768df00->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea7a0 } +0x55cbf768df00->Object::Object { 0x55cbf768dee0, 0x7fff02cea7a0 } +0x7fff02cea7a0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea7a0 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffe8a6c8db7->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00 } -0x7ffe8a6c8e40->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8db7 } -0x7ffe8a6c8db7->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8db7, 0x7ffe8a6c8e40 } -Next token is token 'a' (0x7ffe8a6c8e40 'a') -0x7ffe8a6c8d90->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8e40 } -0x7ffe8a6c8e40->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8d90, 0x7ffe8a6c8e40 } -Shifting token 'a' (0x7ffe8a6c8d90 'a') -0x5595c6ac4f20->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8d90 } -0x7ffe8a6c8d90->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8d90 } +0x7fff02cea6f7->Object::Object { 0x55cbf768dee0, 0x55cbf768df00 } +0x7fff02cea780->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea6f7 } +0x7fff02cea6f7->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea6f7, 0x7fff02cea780 } +Next token is token 'a' (0x7fff02cea780 'a') +0x7fff02cea6d0->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea780 } +0x7fff02cea780->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea6d0, 0x7fff02cea780 } +Shifting token 'a' (0x7fff02cea6d0 'a') +0x55cbf768df20->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea6d0 } +0x7fff02cea6d0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea6d0 } Entering state 2 Stack now 0 11 11 2 -0x7ffe8a6c8e60->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20 } +0x7fff02cea7a0->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5595c6ac4f20 'a') --> $$ = nterm item (0x7ffe8a6c8e60 'a') -0x5595c6ac4f20->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8e60 } -0x5595c6ac4f20->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8e60 } -0x7ffe8a6c8e60->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 
0x7ffe8a6c8e60 } + $1 = token 'a' (0x55cbf768df20 'a') +-> $$ = nterm item (0x7fff02cea7a0 'a') +0x55cbf768df20->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea7a0 } +0x55cbf768df20->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea7a0 } +0x7fff02cea7a0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea7a0 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffe8a6c8db7->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20 } -0x7ffe8a6c8e40->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8db7 } -0x7ffe8a6c8db7->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8db7, 0x7ffe8a6c8e40 } -Next token is token 'a' (0x7ffe8a6c8e40 'a') -0x7ffe8a6c8d90->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8e40 } -0x7ffe8a6c8e40->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8d90, 0x7ffe8a6c8e40 } -Shifting token 'a' (0x7ffe8a6c8d90 'a') -0x5595c6ac4f40->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8d90 } -0x7ffe8a6c8d90->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8d90 } +0x7fff02cea6f7->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20 } +0x7fff02cea780->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea6f7 } +0x7fff02cea6f7->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea6f7, 0x7fff02cea780 } +Next token is token 'a' (0x7fff02cea780 'a') +0x7fff02cea6d0->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea780 } +0x7fff02cea780->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea6d0, 0x7fff02cea780 } +Shifting token 'a' (0x7fff02cea6d0 'a') +0x55cbf768df40->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea6d0 } +0x7fff02cea6d0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea6d0 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffe8a6c8e60->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40 } +0x7fff02cea7a0->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5595c6ac4f40 'a') --> $$ = nterm item (0x7ffe8a6c8e60 'a') -0x5595c6ac4f40->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8e60 } -0x5595c6ac4f40->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8e60 } -0x7ffe8a6c8e60->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8e60 } + $1 = token 'a' (0x55cbf768df40 'a') +-> $$ = nterm item (0x7fff02cea7a0 'a') +0x55cbf768df40->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea7a0 } +0x55cbf768df40->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea7a0 } +0x7fff02cea7a0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea7a0 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffe8a6c8db7->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40 } -0x7ffe8a6c8e40->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8db7 } -0x7ffe8a6c8db7->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8db7, 
0x7ffe8a6c8e40 } -Next token is token 'p' (0x7ffe8a6c8e40 'p'Exception caught: cleaning lookahead and stack -0x5595c6ac4f40->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8e40 } -0x5595c6ac4f20->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8e40 } -0x5595c6ac4f00->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8e40 } -0x5595c6ac4ee0->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8e40 } -0x7ffe8a6c8e40->Object::~Object { 0x7ffe8a6c8e40 } +0x7fff02cea6f7->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40 } +0x7fff02cea780->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea6f7 } +0x7fff02cea6f7->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea6f7, 0x7fff02cea780 } +Next token is token 'p' (0x7fff02cea780 'p'Exception caught: cleaning lookahead and stack +0x55cbf768df40->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea780 } +0x55cbf768df20->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea780 } +0x55cbf768df00->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea780 } +0x55cbf768dee0->Object::~Object { 0x55cbf768dee0, 0x7fff02cea780 } +0x7fff02cea780->Object::~Object { 0x7fff02cea780 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -268960,201 +268879,203 @@ Entering state 0 Stack now 0 Reading a token -0x7ffe8a6c8db7->Object::Object { } -0x7ffe8a6c8e40->Object::Object { 0x7ffe8a6c8db7 } -0x7ffe8a6c8db7->Object::~Object { 0x7ffe8a6c8db7, 0x7ffe8a6c8e40 } -Next token is token 'a' (0x7ffe8a6c8e40 'a') -0x7ffe8a6c8d90->Object::Object { 0x7ffe8a6c8e40 } -0x7ffe8a6c8e40->Object::~Object { 0x7ffe8a6c8d90, 0x7ffe8a6c8e40 } -Shifting token 'a' (0x7ffe8a6c8d90 'a') -0x5595c6ac4ee0->Object::Object { 0x7ffe8a6c8d90 } -0x7ffe8a6c8d90->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8d90 } +0x7fff02cea6f7->Object::Object { } +0x7fff02cea780->Object::Object { 0x7fff02cea6f7 } +0x7fff02cea6f7->Object::~Object { 0x7fff02cea6f7, 0x7fff02cea780 } +Next token is token 'a' (0x7fff02cea780 'a') +0x7fff02cea6d0->Object::Object { 0x7fff02cea780 } +0x7fff02cea780->Object::~Object { 0x7fff02cea6d0, 0x7fff02cea780 } +Shifting token 'a' (0x7fff02cea6d0 'a') +0x55cbf768dee0->Object::Object { 0x7fff02cea6d0 } +0x7fff02cea6d0->Object::~Object { 0x55cbf768dee0, 0x7fff02cea6d0 } Entering state 2 Stack now 0 2 -0x7ffe8a6c8e60->Object::Object { 0x5595c6ac4ee0 } +0x7fff02cea7a0->Object::Object { 0x55cbf768dee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5595c6ac4ee0 'a') --> $$ = nterm item (0x7ffe8a6c8e60 'a') -0x5595c6ac4ee0->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8e60 } -0x5595c6ac4ee0->Object::Object { 0x7ffe8a6c8e60 } -0x7ffe8a6c8e60->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8e60 } + $1 = token 'a' (0x55cbf768dee0 'a') +-> $$ = nterm item (0x7fff02cea7a0 'a') +0x55cbf768dee0->Object::~Object { 0x55cbf768dee0, 0x7fff02cea7a0 } +0x55cbf768dee0->Object::Object { 0x7fff02cea7a0 } +0x7fff02cea7a0->Object::~Object { 0x55cbf768dee0, 0x7fff02cea7a0 } Entering state 11 Stack now 0 11 Reading a token -0x7ffe8a6c8db7->Object::Object { 0x5595c6ac4ee0 } -0x7ffe8a6c8e40->Object::Object { 0x5595c6ac4ee0, 0x7ffe8a6c8db7 } -0x7ffe8a6c8db7->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8db7, 0x7ffe8a6c8e40 } -Next token is token 'a' (0x7ffe8a6c8e40 'a') 
-0x7ffe8a6c8d90->Object::Object { 0x5595c6ac4ee0, 0x7ffe8a6c8e40 } -0x7ffe8a6c8e40->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8d90, 0x7ffe8a6c8e40 } -Shifting token 'a' (0x7ffe8a6c8d90 'a') -0x5595c6ac4f00->Object::Object { 0x5595c6ac4ee0, 0x7ffe8a6c8d90 } -0x7ffe8a6c8d90->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8d90 } +0x7fff02cea6f7->Object::Object { 0x55cbf768dee0 } +0x7fff02cea780->Object::Object { 0x55cbf768dee0, 0x7fff02cea6f7 } +0x7fff02cea6f7->Object::~Object { 0x55cbf768dee0, 0x7fff02cea6f7, 0x7fff02cea780 } +Next token is token 'a' (0x7fff02cea780 'a') +0x7fff02cea6d0->Object::Object { 0x55cbf768dee0, 0x7fff02cea780 } +0x7fff02cea780->Object::~Object { 0x55cbf768dee0, 0x7fff02cea6d0, 0x7fff02cea780 } +Shifting token 'a' (0x7fff02cea6d0 'a') +0x55cbf768df00->Object::Object { 0x55cbf768dee0, 0x7fff02cea6d0 } +0x7fff02cea6d0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea6d0 } Entering state 2 Stack now 0 11 2 -0x7ffe8a6c8e60->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00 } +0x7fff02cea7a0->Object::Object { 0x55cbf768dee0, 0x55cbf768df00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5595c6ac4f00 'a') --> $$ = nterm item (0x7ffe8a6c8e60 'a') -0x5595c6ac4f00->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8e60 } -0x5595c6ac4f00->Object::Object { 0x5595c6ac4ee0, 0x7ffe8a6c8e60 } -0x7ffe8a6c8e60->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8e60 } + $1 = token 'a' (0x55cbf768df00 'a') +-> $$ = nterm item (0x7fff02cea7a0 'a') +0x55cbf768df00->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea7a0 } +0x55cbf768df00->Object::Object { 0x55cbf768dee0, 0x7fff02cea7a0 } +0x7fff02cea7a0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea7a0 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffe8a6c8db7->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00 } -0x7ffe8a6c8e40->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8db7 } -0x7ffe8a6c8db7->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8db7, 0x7ffe8a6c8e40 } -Next token is token 'a' (0x7ffe8a6c8e40 'a') -0x7ffe8a6c8d90->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8e40 } -0x7ffe8a6c8e40->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8d90, 0x7ffe8a6c8e40 } -Shifting token 'a' (0x7ffe8a6c8d90 'a') -0x5595c6ac4f20->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8d90 } -0x7ffe8a6c8d90->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8d90 } +0x7fff02cea6f7->Object::Object { 0x55cbf768dee0, 0x55cbf768df00 } +0x7fff02cea780->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea6f7 } +0x7fff02cea6f7->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea6f7, 0x7fff02cea780 } +Next token is token 'a' (0x7fff02cea780 'a') +0x7fff02cea6d0->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea780 } +0x7fff02cea780->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea6d0, 0x7fff02cea780 } +Shifting token 'a' (0x7fff02cea6d0 'a') +0x55cbf768df20->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea6d0 } +0x7fff02cea6d0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea6d0 } Entering state 2 Stack now 0 11 11 2 -0x7ffe8a6c8e60->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20 } +0x7fff02cea7a0->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5595c6ac4f20 'a') --> $$ = nterm item (0x7ffe8a6c8e60 'a') 
-0x5595c6ac4f20->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8e60 } -0x5595c6ac4f20->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8e60 } -0x7ffe8a6c8e60->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8e60 } + $1 = token 'a' (0x55cbf768df20 'a') +-> $$ = nterm item (0x7fff02cea7a0 'a') +0x55cbf768df20->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea7a0 } +0x55cbf768df20->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea7a0 } +0x7fff02cea7a0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea7a0 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffe8a6c8db7->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20 } -0x7ffe8a6c8e40->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8db7 } -0x7ffe8a6c8db7->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8db7, 0x7ffe8a6c8e40 } -Next token is token 'a' (0x7ffe8a6c8e40 'a') -0x7ffe8a6c8d90->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8e40 } -0x7ffe8a6c8e40->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8d90, 0x7ffe8a6c8e40 } -Shifting token 'a' (0x7ffe8a6c8d90 'a') -0x5595c6ac4f40->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8d90 } -0x7ffe8a6c8d90->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8d90 } +0x7fff02cea6f7->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20 } +0x7fff02cea780->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea6f7 } +0x7fff02cea6f7->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea6f7, 0x7fff02cea780 } +Next token is token 'a' (0x7fff02cea780 'a') +0x7fff02cea6d0->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea780 } +0x7fff02cea780->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea6d0, 0x7fff02cea780 } +Shifting token 'a' (0x7fff02cea6d0 'a') +0x55cbf768df40->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea6d0 } +0x7fff02cea6d0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea6d0 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffe8a6c8e60->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40 } +0x7fff02cea7a0->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5595c6ac4f40 'a') --> $$ = nterm item (0x7ffe8a6c8e60 'a') -0x5595c6ac4f40->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8e60 } -0x5595c6ac4f40->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8e60 } -0x7ffe8a6c8e60->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8e60 } + $1 = token 'a' (0x55cbf768df40 'a') +-> $$ = nterm item (0x7fff02cea7a0 'a') +0x55cbf768df40->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea7a0 } +0x55cbf768df40->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea7a0 } +0x7fff02cea7a0->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea7a0 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffe8a6c8db7->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 
0x5595c6ac4f20, 0x5595c6ac4f40 } -0x7ffe8a6c8e40->Object::Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8db7 } -0x7ffe8a6c8db7->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8db7, 0x7ffe8a6c8e40 } -Next token is token 'p' (0x7ffe8a6c8e40 'p'Exception caught: cleaning lookahead and stack -0x5595c6ac4f40->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x5595c6ac4f40, 0x7ffe8a6c8e40 } -0x5595c6ac4f20->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x5595c6ac4f20, 0x7ffe8a6c8e40 } -0x5595c6ac4f00->Object::~Object { 0x5595c6ac4ee0, 0x5595c6ac4f00, 0x7ffe8a6c8e40 } -0x5595c6ac4ee0->Object::~Object { 0x5595c6ac4ee0, 0x7ffe8a6c8e40 } -0x7ffe8a6c8e40->Object::~Object { 0x7ffe8a6c8e40 } +0x7fff02cea6f7->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40 } +0x7fff02cea780->Object::Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea6f7 } +0x7fff02cea6f7->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea6f7, 0x7fff02cea780 } +Next token is token 'p' (0x7fff02cea780 'p'Exception caught: cleaning lookahead and stack +0x55cbf768df40->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x55cbf768df40, 0x7fff02cea780 } +0x55cbf768df20->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x55cbf768df20, 0x7fff02cea780 } +0x55cbf768df00->Object::~Object { 0x55cbf768dee0, 0x55cbf768df00, 0x7fff02cea780 } +0x55cbf768dee0->Object::~Object { 0x55cbf768dee0, 0x7fff02cea780 } +0x7fff02cea780->Object::~Object { 0x7fff02cea780 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr stdout: exception caught: printer ./c++.at:1362: $PREPARSER ./input aaaae +./glr-regression.at:1176: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr11 glr-regr11.cc $LIBS stderr: exception caught: syntax error ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaT stderr: +./c++.at:1362: $PREPARSER ./input aaaaE stdout: stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1360: $PREPARSER ./input aaaas +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: reduction ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaal +stdout: +./c++.at:1362: $PREPARSER ./input aaaaT +./glr-regression.at:1037: $PREPARSER ./glr-regr9 stderr: -exception caught: yylex +memory exhausted +./glr-regression.at:1037: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./c++.at:1360: $PREPARSER ./input aaaal +742. 
glr-regression.at:1037: stderr: + ok +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +exception caught: yylex ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -stdout: -./glr-regression.at:1311: $PREPARSER ./glr-regr12 -./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1362: $PREPARSER ./input aaaaR stderr: -./glr-regression.at:1311: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1360: $PREPARSER ./input i stderr: +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: initial-action ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -751. glr-regression.at:1311: ok +======== Testing with C++ standard flags: '' +./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1360: $PREPARSER ./input aaaap + stderr: ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ./c++.at:1360: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x5650646fcb40->Object::Object { } -Next token is token 'a' (0x5650646fcb40 'a') -Shifting token 'a' (0x5650646fcb40 'a') +0x5587fa9a6b40->Object::Object { } +Next token is token 'a' (0x5587fa9a6b40 'a') +Shifting token 'a' (0x5587fa9a6b40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5650646fcb40 'a') --> $$ = nterm item (0x5650646fcb40 'a') + $1 = token 'a' (0x5587fa9a6b40 'a') +-> $$ = nterm item (0x5587fa9a6b40 'a') Entering state 11 Stack now 0 11 Reading a token -0x5650646fcb90->Object::Object { 0x5650646fcb40 } -Next token is token 'a' (0x5650646fcb90 'a') -Shifting token 'a' (0x5650646fcb90 'a') +0x5587fa9a6b90->Object::Object { 0x5587fa9a6b40 } +Next token is token 'a' (0x5587fa9a6b90 'a') +Shifting token 'a' (0x5587fa9a6b90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5650646fcb90 'a') --> $$ = nterm item (0x5650646fcb90 'a') + $1 = token 'a' (0x5587fa9a6b90 'a') +-> $$ = nterm item (0x5587fa9a6b90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x5650646fcbe0->Object::Object { 0x5650646fcb40, 0x5650646fcb90 } -Next token is token 'a' (0x5650646fcbe0 'a') -Shifting token 'a' (0x5650646fcbe0 'a') +0x5587fa9a6be0->Object::Object { 0x5587fa9a6b40, 0x5587fa9a6b90 } +Next token is token 'a' (0x5587fa9a6be0 'a') +Shifting token 'a' (0x5587fa9a6be0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5650646fcbe0 'a') --> $$ = nterm item (0x5650646fcbe0 'a') + $1 = token 'a' (0x5587fa9a6be0 'a') +-> $$ = nterm item (0x5587fa9a6be0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x5650646fcc30->Object::Object { 0x5650646fcb40, 0x5650646fcb90, 0x5650646fcbe0 } -Next token is token 'a' (0x5650646fcc30 'a') -Shifting token 'a' (0x5650646fcc30 'a') +0x5587fa9a6c30->Object::Object { 0x5587fa9a6b40, 0x5587fa9a6b90, 0x5587fa9a6be0 } +Next token is token 'a' (0x5587fa9a6c30 'a') +Shifting token 'a' (0x5587fa9a6c30 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5650646fcc30 'a') --> $$ = nterm item (0x5650646fcc30 'a') + $1 = token 'a' (0x5587fa9a6c30 'a') +-> $$ = nterm item (0x5587fa9a6c30 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x5650646fcc80->Object::Object { 0x5650646fcb40, 
0x5650646fcb90, 0x5650646fcbe0, 0x5650646fcc30 } -Next token is token 'p' (0x5650646fcc80 'p'Exception caught: cleaning lookahead and stack -0x5650646fcc80->Object::~Object { 0x5650646fcb40, 0x5650646fcb90, 0x5650646fcbe0, 0x5650646fcc30, 0x5650646fcc80 } -0x5650646fcc30->Object::~Object { 0x5650646fcb40, 0x5650646fcb90, 0x5650646fcbe0, 0x5650646fcc30 } -0x5650646fcbe0->Object::~Object { 0x5650646fcb40, 0x5650646fcb90, 0x5650646fcbe0 } -0x5650646fcb90->Object::~Object { 0x5650646fcb40, 0x5650646fcb90 } -0x5650646fcb40->Object::~Object { 0x5650646fcb40 } +0x5587fa9a6c80->Object::Object { 0x5587fa9a6b40, 0x5587fa9a6b90, 0x5587fa9a6be0, 0x5587fa9a6c30 } +Next token is token 'p' (0x5587fa9a6c80 'p'Exception caught: cleaning lookahead and stack +0x5587fa9a6c80->Object::~Object { 0x5587fa9a6b40, 0x5587fa9a6b90, 0x5587fa9a6be0, 0x5587fa9a6c30, 0x5587fa9a6c80 } +0x5587fa9a6c30->Object::~Object { 0x5587fa9a6b40, 0x5587fa9a6b90, 0x5587fa9a6be0, 0x5587fa9a6c30 } +0x5587fa9a6be0->Object::~Object { 0x5587fa9a6b40, 0x5587fa9a6b90, 0x5587fa9a6be0 } +0x5587fa9a6b90->Object::~Object { 0x5587fa9a6b40, 0x5587fa9a6b90 } +0x5587fa9a6b40->Object::~Object { 0x5587fa9a6b40 } exception caught: printer end { } ./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr @@ -269163,887 +269084,1059 @@ Entering state 0 Stack now 0 Reading a token -0x5650646fcb40->Object::Object { } -Next token is token 'a' (0x5650646fcb40 'a') -Shifting token 'a' (0x5650646fcb40 'a') +0x5587fa9a6b40->Object::Object { } +Next token is token 'a' (0x5587fa9a6b40 'a') +Shifting token 'a' (0x5587fa9a6b40 'a') Entering state 2 Stack now 0 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5650646fcb40 'a') --> $$ = nterm item (0x5650646fcb40 'a') + $1 = token 'a' (0x5587fa9a6b40 'a') +-> $$ = nterm item (0x5587fa9a6b40 'a') Entering state 11 Stack now 0 11 Reading a token -0x5650646fcb90->Object::Object { 0x5650646fcb40 } -Next token is token 'a' (0x5650646fcb90 'a') -Shifting token 'a' (0x5650646fcb90 'a') +0x5587fa9a6b90->Object::Object { 0x5587fa9a6b40 } +Next token is token 'a' (0x5587fa9a6b90 'a') +Shifting token 'a' (0x5587fa9a6b90 'a') Entering state 2 Stack now 0 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5650646fcb90 'a') --> $$ = nterm item (0x5650646fcb90 'a') + $1 = token 'a' (0x5587fa9a6b90 'a') +-> $$ = nterm item (0x5587fa9a6b90 'a') Entering state 11 Stack now 0 11 11 Reading a token -0x5650646fcbe0->Object::Object { 0x5650646fcb40, 0x5650646fcb90 } -Next token is token 'a' (0x5650646fcbe0 'a') -Shifting token 'a' (0x5650646fcbe0 'a') +0x5587fa9a6be0->Object::Object { 0x5587fa9a6b40, 0x5587fa9a6b90 } +Next token is token 'a' (0x5587fa9a6be0 'a') +Shifting token 'a' (0x5587fa9a6be0 'a') Entering state 2 Stack now 0 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5650646fcbe0 'a') --> $$ = nterm item (0x5650646fcbe0 'a') + $1 = token 'a' (0x5587fa9a6be0 'a') +-> $$ = nterm item (0x5587fa9a6be0 'a') Entering state 11 Stack now 0 11 11 11 Reading a token -0x5650646fcc30->Object::Object { 0x5650646fcb40, 0x5650646fcb90, 0x5650646fcbe0 } -Next token is token 'a' (0x5650646fcc30 'a') -Shifting token 'a' (0x5650646fcc30 'a') +0x5587fa9a6c30->Object::Object { 0x5587fa9a6b40, 0x5587fa9a6b90, 0x5587fa9a6be0 } +Next token is token 'a' (0x5587fa9a6c30 'a') +Shifting token 'a' (0x5587fa9a6c30 'a') Entering state 2 Stack now 0 11 11 11 2 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x5650646fcc30 'a') --> $$ = nterm item (0x5650646fcc30 'a') + $1 = token 'a' 
(0x5587fa9a6c30 'a') +-> $$ = nterm item (0x5587fa9a6c30 'a') Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x5650646fcc80->Object::Object { 0x5650646fcb40, 0x5650646fcb90, 0x5650646fcbe0, 0x5650646fcc30 } -Next token is token 'p' (0x5650646fcc80 'p'Exception caught: cleaning lookahead and stack -0x5650646fcc80->Object::~Object { 0x5650646fcb40, 0x5650646fcb90, 0x5650646fcbe0, 0x5650646fcc30, 0x5650646fcc80 } -0x5650646fcc30->Object::~Object { 0x5650646fcb40, 0x5650646fcb90, 0x5650646fcbe0, 0x5650646fcc30 } -0x5650646fcbe0->Object::~Object { 0x5650646fcb40, 0x5650646fcb90, 0x5650646fcbe0 } -0x5650646fcb90->Object::~Object { 0x5650646fcb40, 0x5650646fcb90 } -0x5650646fcb40->Object::~Object { 0x5650646fcb40 } +0x5587fa9a6c80->Object::Object { 0x5587fa9a6b40, 0x5587fa9a6b90, 0x5587fa9a6be0, 0x5587fa9a6c30 } +Next token is token 'p' (0x5587fa9a6c80 'p'Exception caught: cleaning lookahead and stack +0x5587fa9a6c80->Object::~Object { 0x5587fa9a6b40, 0x5587fa9a6b90, 0x5587fa9a6be0, 0x5587fa9a6c30, 0x5587fa9a6c80 } +0x5587fa9a6c30->Object::~Object { 0x5587fa9a6b40, 0x5587fa9a6b90, 0x5587fa9a6be0, 0x5587fa9a6c30 } +0x5587fa9a6be0->Object::~Object { 0x5587fa9a6b40, 0x5587fa9a6b90, 0x5587fa9a6be0 } +0x5587fa9a6b90->Object::~Object { 0x5587fa9a6b40, 0x5587fa9a6b90 } +0x5587fa9a6b40->Object::~Object { 0x5587fa9a6b40 } exception caught: printer end { } ./c++.at:1360: grep '^exception caught: printer$' stderr +750. glr-regression.at:1310: testing Leaked semantic values if user action cuts parse: glr.c ... +./glr-regression.at:1310: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.c glr-regr12.y stdout: exception caught: printer ./c++.at:1360: $PREPARSER ./input aaaae stderr: +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaT +./glr-regression.at:1310: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr12 glr-regr12.c $LIBS +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaR +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:857: $PREPARSER ./input +stderr: +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:857: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./glr-regression.at:1174: $PREPARSER ./glr-regr11 +stderr: +./glr-regression.at:1174: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +747. glr-regression.at:1174: ok +stdout: +./glr-regression.at:845: $PREPARSER ./glr-regr7 +stderr: +memory exhausted +./glr-regression.at:845: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +737. glr-regression.at:845: ok + +stderr: +stdout: +./glr-regression.at:1103: $PREPARSER ./glr-regr10 +stderr: +./glr-regression.at:1103: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +751. glr-regression.at:1311: testing Leaked semantic values if user action cuts parse: glr.cc ... +745. 
glr-regression.at:1103: ok +./glr-regression.at:1311: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y +752. glr-regression.at:1312: testing Leaked semantic values if user action cuts parse: glr2.cc ... +./glr-regression.at:1312: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr12.cc glr-regr12.y + +./glr-regression.at:1312: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS +./glr-regression.at:1311: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr12 glr-regr12.cc $LIBS +753. glr-regression.at:1445: testing Incorrect lookahead during deterministic GLR: glr.c ... +./glr-regression.at:1445: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.c glr-regr13.y +stderr: stdout: ./c++.at:1363: $PREPARSER ./input aaaas +./glr-regression.at:1445: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr13 glr-regr13.c $LIBS stderr: exception caught: reduction ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -exception caught: syntax error ./c++.at:1363: $PREPARSER ./input aaaal -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: yylex ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./c++.at:1363: $PREPARSER ./input i -./c++.at:1360: $PREPARSER ./input aaaaE +stdout: stderr: +./glr-regression.at:1310: $PREPARSER ./glr-regr12 stderr: exception caught: initial-action ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1310: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1363: $PREPARSER ./input aaaap stderr: +750. 
glr-regression.at:1310: ok ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT ./c++.at:1363: $PREPARSER ./input --debug aaaap stderr: -stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffc25eb0b4f->Object::Object { } -0x7ffc25eb0c20->Object::Object { 0x7ffc25eb0b4f } -0x7ffc25eb0b4f->Object::~Object { 0x7ffc25eb0b4f, 0x7ffc25eb0c20 } -Next token is token 'a' (0x7ffc25eb0c20 'a') -0x7ffc25eb0b70->Object::Object { 0x7ffc25eb0c20 } -0x7ffc25eb0c20->Object::~Object { 0x7ffc25eb0b70, 0x7ffc25eb0c20 } -Shifting token 'a' (0x7ffc25eb0b70 'a') -0x5620d756eee0->Object::Object { 0x7ffc25eb0b70 } -0x7ffc25eb0b70->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0b70 } +0x7ffcb1fd91af->Object::Object { } +0x7ffcb1fd9280->Object::Object { 0x7ffcb1fd91af } +0x7ffcb1fd91af->Object::~Object { 0x7ffcb1fd91af, 0x7ffcb1fd9280 } +Next token is token 'a' (0x7ffcb1fd9280 'a') +0x7ffcb1fd91d0->Object::Object { 0x7ffcb1fd9280 } +0x7ffcb1fd9280->Object::~Object { 0x7ffcb1fd91d0, 0x7ffcb1fd9280 } +Shifting token 'a' (0x7ffcb1fd91d0 'a') +0x55c245342ee0->Object::Object { 0x7ffcb1fd91d0 } +0x7ffcb1fd91d0->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd91d0 } Entering state 1 Stack now 0 1 -0x7ffc25eb0c40->Object::Object { 0x5620d756eee0 } +0x7ffcb1fd92a0->Object::Object { 0x55c245342ee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5620d756eee0 'a') --> $$ = nterm item (0x7ffc25eb0c40 'a') -0x5620d756eee0->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0c40 } -0x5620d756eee0->Object::Object { 0x7ffc25eb0c40 } -0x7ffc25eb0c40->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0c40 } + $1 = token 'a' (0x55c245342ee0 'a') +-> $$ = nterm item (0x7ffcb1fd92a0 'a') +0x55c245342ee0->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd92a0 } +0x55c245342ee0->Object::Object { 0x7ffcb1fd92a0 } +0x7ffcb1fd92a0->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd92a0 } Entering state 10 Stack now 0 10 Reading a token -0x7ffc25eb0b4f->Object::Object { 0x5620d756eee0 } -0x7ffc25eb0c20->Object::Object { 0x5620d756eee0, 0x7ffc25eb0b4f } -0x7ffc25eb0b4f->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0b4f, 0x7ffc25eb0c20 } -Next token is token 'a' (0x7ffc25eb0c20 'a') -0x7ffc25eb0b70->Object::Object { 0x5620d756eee0, 0x7ffc25eb0c20 } -0x7ffc25eb0c20->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0b70, 0x7ffc25eb0c20 } -Shifting token 'a' (0x7ffc25eb0b70 'a') -0x5620d756ef00->Object::Object { 0x5620d756eee0, 0x7ffc25eb0b70 } -0x7ffc25eb0b70->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0b70 } +0x7ffcb1fd91af->Object::Object { 0x55c245342ee0 } +0x7ffcb1fd9280->Object::Object { 0x55c245342ee0, 0x7ffcb1fd91af } +0x7ffcb1fd91af->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd91af, 0x7ffcb1fd9280 } +Next token is token 'a' (0x7ffcb1fd9280 'a') +0x7ffcb1fd91d0->Object::Object { 0x55c245342ee0, 0x7ffcb1fd9280 } +0x7ffcb1fd9280->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd91d0, 0x7ffcb1fd9280 } +Shifting token 'a' (0x7ffcb1fd91d0 'a') +0x55c245342f00->Object::Object { 0x55c245342ee0, 0x7ffcb1fd91d0 } +0x7ffcb1fd91d0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd91d0 } Entering state 1 Stack now 0 10 1 -0x7ffc25eb0c40->Object::Object { 0x5620d756eee0, 0x5620d756ef00 } +0x7ffcb1fd92a0->Object::Object { 0x55c245342ee0, 0x55c245342f00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5620d756ef00 'a') --> $$ = nterm item (0x7ffc25eb0c40 'a') -0x5620d756ef00->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0c40 } 
-0x5620d756ef00->Object::Object { 0x5620d756eee0, 0x7ffc25eb0c40 } -0x7ffc25eb0c40->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0c40 } + $1 = token 'a' (0x55c245342f00 'a') +-> $$ = nterm item (0x7ffcb1fd92a0 'a') +0x55c245342f00->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd92a0 } +0x55c245342f00->Object::Object { 0x55c245342ee0, 0x7ffcb1fd92a0 } +0x7ffcb1fd92a0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd92a0 } Entering state 10 Stack now 0 10 10 Reading a token -0x7ffc25eb0b4f->Object::Object { 0x5620d756eee0, 0x5620d756ef00 } -0x7ffc25eb0c20->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0b4f } -0x7ffc25eb0b4f->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0b4f, 0x7ffc25eb0c20 } -Next token is token 'a' (0x7ffc25eb0c20 'a') -0x7ffc25eb0b70->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0c20 } -0x7ffc25eb0c20->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0b70, 0x7ffc25eb0c20 } -Shifting token 'a' (0x7ffc25eb0b70 'a') -0x5620d756ef20->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0b70 } -0x7ffc25eb0b70->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0b70 } +0x7ffcb1fd91af->Object::Object { 0x55c245342ee0, 0x55c245342f00 } +0x7ffcb1fd9280->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd91af } +0x7ffcb1fd91af->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd91af, 0x7ffcb1fd9280 } +Next token is token 'a' (0x7ffcb1fd9280 'a') +0x7ffcb1fd91d0->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd9280 } +0x7ffcb1fd9280->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd91d0, 0x7ffcb1fd9280 } +Shifting token 'a' (0x7ffcb1fd91d0 'a') +0x55c245342f20->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd91d0 } +0x7ffcb1fd91d0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd91d0 } Entering state 1 Stack now 0 10 10 1 -0x7ffc25eb0c40->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20 } +0x7ffcb1fd92a0->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5620d756ef20 'a') --> $$ = nterm item (0x7ffc25eb0c40 'a') -0x5620d756ef20->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0c40 } -0x5620d756ef20->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0c40 } -0x7ffc25eb0c40->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0c40 } + $1 = token 'a' (0x55c245342f20 'a') +-> $$ = nterm item (0x7ffcb1fd92a0 'a') +0x55c245342f20->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd92a0 } +0x55c245342f20->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd92a0 } +0x7ffcb1fd92a0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd92a0 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffc25eb0b4f->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20 } -0x7ffc25eb0c20->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0b4f } -0x7ffc25eb0b4f->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0b4f, 0x7ffc25eb0c20 } -Next token is token 'a' (0x7ffc25eb0c20 'a') -0x7ffc25eb0b70->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0c20 } -0x7ffc25eb0c20->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0b70, 0x7ffc25eb0c20 } -Shifting token 'a' (0x7ffc25eb0b70 'a') 
-0x5620d756ef40->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0b70 } -0x7ffc25eb0b70->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0b70 } +0x7ffcb1fd91af->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20 } +0x7ffcb1fd9280->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd91af } +0x7ffcb1fd91af->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd91af, 0x7ffcb1fd9280 } +Next token is token 'a' (0x7ffcb1fd9280 'a') +0x7ffcb1fd91d0->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd9280 } +0x7ffcb1fd9280->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd91d0, 0x7ffcb1fd9280 } +Shifting token 'a' (0x7ffcb1fd91d0 'a') +0x55c245342f40->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd91d0 } +0x7ffcb1fd91d0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd91d0 } Entering state 1 Stack now 0 10 10 10 1 -0x7ffc25eb0c40->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40 } +0x7ffcb1fd92a0->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5620d756ef40 'a') --> $$ = nterm item (0x7ffc25eb0c40 'a') -0x5620d756ef40->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0c40 } -0x5620d756ef40->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0c40 } -0x7ffc25eb0c40->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0c40 } + $1 = token 'a' (0x55c245342f40 'a') +-> $$ = nterm item (0x7ffcb1fd92a0 'a') +0x55c245342f40->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd92a0 } +0x55c245342f40->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd92a0 } +0x7ffcb1fd92a0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd92a0 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffc25eb0b4f->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40 } -0x7ffc25eb0c20->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0b4f } -0x7ffc25eb0b4f->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0b4f, 0x7ffc25eb0c20 } -Next token is token 'p' (0x7ffc25eb0c20 'p'Exception caught: cleaning lookahead and stack -0x5620d756ef40->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0c20 } -0x5620d756ef20->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0c20 } -0x5620d756ef00->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0c20 } -0x5620d756eee0->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0c20 } -0x7ffc25eb0c20->Object::~Object { 0x7ffc25eb0c20 } +0x7ffcb1fd91af->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40 } +0x7ffcb1fd9280->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd91af } +0x7ffcb1fd91af->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd91af, 0x7ffcb1fd9280 } +Next token is token 'p' (0x7ffcb1fd9280 'p'Exception caught: cleaning lookahead and stack +0x55c245342f40->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 
0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd9280 } +0x55c245342f20->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd9280 } +0x55c245342f00->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd9280 } +0x55c245342ee0->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd9280 } +0x7ffcb1fd9280->Object::~Object { 0x7ffcb1fd9280 } exception caught: printer end { } + ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -758. glr-regression.at:1680: testing Incorrect lookahead during nondeterministic GLR: glr2.cc ... -./glr-regression.at:1680: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y -./c++.at:1360: $PREPARSER ./input aaaaR stderr: stderr: +stdout: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffc25eb0b4f->Object::Object { } -0x7ffc25eb0c20->Object::Object { 0x7ffc25eb0b4f } -0x7ffc25eb0b4f->Object::~Object { 0x7ffc25eb0b4f, 0x7ffc25eb0c20 } -Next token is token 'a' (0x7ffc25eb0c20 'a') -0x7ffc25eb0b70->Object::Object { 0x7ffc25eb0c20 } -0x7ffc25eb0c20->Object::~Object { 0x7ffc25eb0b70, 0x7ffc25eb0c20 } -Shifting token 'a' (0x7ffc25eb0b70 'a') -0x5620d756eee0->Object::Object { 0x7ffc25eb0b70 } -0x7ffc25eb0b70->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0b70 } +0x7ffcb1fd91af->Object::Object { } +0x7ffcb1fd9280->Object::Object { 0x7ffcb1fd91af } +0x7ffcb1fd91af->Object::~Object { 0x7ffcb1fd91af, 0x7ffcb1fd9280 } +Next token is token 'a' (0x7ffcb1fd9280 'a') +0x7ffcb1fd91d0->Object::Object { 0x7ffcb1fd9280 } +0x7ffcb1fd9280->Object::~Object { 0x7ffcb1fd91d0, 0x7ffcb1fd9280 } +Shifting token 'a' (0x7ffcb1fd91d0 'a') +0x55c245342ee0->Object::Object { 0x7ffcb1fd91d0 } +0x7ffcb1fd91d0->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd91d0 } Entering state 1 Stack now 0 1 -0x7ffc25eb0c40->Object::Object { 0x5620d756eee0 } +0x7ffcb1fd92a0->Object::Object { 0x55c245342ee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5620d756eee0 'a') --> $$ = nterm item (0x7ffc25eb0c40 'a') -0x5620d756eee0->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0c40 } -0x5620d756eee0->Object::Object { 0x7ffc25eb0c40 } -0x7ffc25eb0c40->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0c40 } + $1 = token 'a' (0x55c245342ee0 'a') +-> $$ = nterm item (0x7ffcb1fd92a0 'a') +0x55c245342ee0->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd92a0 } +0x55c245342ee0->Object::Object { 0x7ffcb1fd92a0 } +0x7ffcb1fd92a0->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd92a0 } Entering state 10 Stack now 0 10 Reading a token -0x7ffc25eb0b4f->Object::Object { 0x5620d756eee0 } -0x7ffc25eb0c20->Object::Object { 0x5620d756eee0, 0x7ffc25eb0b4f } -0x7ffc25eb0b4f->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0b4f, 0x7ffc25eb0c20 } -Next token is token 'a' (0x7ffc25eb0c20 'a') -0x7ffc25eb0b70->Object::Object { 0x5620d756eee0, 0x7ffc25eb0c20 } -0x7ffc25eb0c20->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0b70, 0x7ffc25eb0c20 } -Shifting token 'a' (0x7ffc25eb0b70 'a') -0x5620d756ef00->Object::Object { 0x5620d756eee0, 0x7ffc25eb0b70 } -0x7ffc25eb0b70->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0b70 } +0x7ffcb1fd91af->Object::Object { 0x55c245342ee0 } +0x7ffcb1fd9280->Object::Object { 0x55c245342ee0, 0x7ffcb1fd91af } +0x7ffcb1fd91af->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd91af, 0x7ffcb1fd9280 } +Next token is token 'a' (0x7ffcb1fd9280 'a') +0x7ffcb1fd91d0->Object::Object { 0x55c245342ee0, 
0x7ffcb1fd9280 } +0x7ffcb1fd9280->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd91d0, 0x7ffcb1fd9280 } +Shifting token 'a' (0x7ffcb1fd91d0 'a') +0x55c245342f00->Object::Object { 0x55c245342ee0, 0x7ffcb1fd91d0 } +0x7ffcb1fd91d0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd91d0 } Entering state 1 Stack now 0 10 1 -0x7ffc25eb0c40->Object::Object { 0x5620d756eee0, 0x5620d756ef00 } +0x7ffcb1fd92a0->Object::Object { 0x55c245342ee0, 0x55c245342f00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5620d756ef00 'a') --> $$ = nterm item (0x7ffc25eb0c40 'a') -0x5620d756ef00->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0c40 } -0x5620d756ef00->Object::Object { 0x5620d756eee0, 0x7ffc25eb0c40 } -0x7ffc25eb0c40->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0c40 } + $1 = token 'a' (0x55c245342f00 'a') +-> $$ = nterm item (0x7ffcb1fd92a0 'a') +0x55c245342f00->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd92a0 } +0x55c245342f00->Object::Object { 0x55c245342ee0, 0x7ffcb1fd92a0 } +0x7ffcb1fd92a0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd92a0 } Entering state 10 Stack now 0 10 10 Reading a token -0x7ffc25eb0b4f->Object::Object { 0x5620d756eee0, 0x5620d756ef00 } -0x7ffc25eb0c20->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0b4f } -0x7ffc25eb0b4f->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0b4f, 0x7ffc25eb0c20 } -Next token is token 'a' (0x7ffc25eb0c20 'a') -0x7ffc25eb0b70->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0c20 } -0x7ffc25eb0c20->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0b70, 0x7ffc25eb0c20 } -Shifting token 'a' (0x7ffc25eb0b70 'a') -0x5620d756ef20->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0b70 } -0x7ffc25eb0b70->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0b70 } +0x7ffcb1fd91af->Object::Object { 0x55c245342ee0, 0x55c245342f00 } +0x7ffcb1fd9280->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd91af } +0x7ffcb1fd91af->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd91af, 0x7ffcb1fd9280 } +Next token is token 'a' (0x7ffcb1fd9280 'a') +0x7ffcb1fd91d0->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd9280 } +0x7ffcb1fd9280->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd91d0, 0x7ffcb1fd9280 } +Shifting token 'a' (0x7ffcb1fd91d0 'a') +0x55c245342f20->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd91d0 } +0x7ffcb1fd91d0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd91d0 } Entering state 1 Stack now 0 10 10 1 -0x7ffc25eb0c40->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20 } +0x7ffcb1fd92a0->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5620d756ef20 'a') --> $$ = nterm item (0x7ffc25eb0c40 'a') -0x5620d756ef20->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0c40 } -0x5620d756ef20->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x7ffc25eb0c40 } -0x7ffc25eb0c40->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0c40 } + $1 = token 'a' (0x55c245342f20 'a') +-> $$ = nterm item (0x7ffcb1fd92a0 'a') +0x55c245342f20->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd92a0 } +0x55c245342f20->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd92a0 } +0x7ffcb1fd92a0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd92a0 } 
Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffc25eb0b4f->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20 } -0x7ffc25eb0c20->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0b4f } -0x7ffc25eb0b4f->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0b4f, 0x7ffc25eb0c20 } -Next token is token 'a' (0x7ffc25eb0c20 'a') -0x7ffc25eb0b70->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0c20 } -0x7ffc25eb0c20->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0b70, 0x7ffc25eb0c20 } -Shifting token 'a' (0x7ffc25eb0b70 'a') -0x5620d756ef40->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0b70 } -0x7ffc25eb0b70->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0b70 } +0x7ffcb1fd91af->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20 } +0x7ffcb1fd9280->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd91af } +0x7ffcb1fd91af->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd91af, 0x7ffcb1fd9280 } +Next token is token 'a' (0x7ffcb1fd9280 'a') +0x7ffcb1fd91d0->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd9280 } +0x7ffcb1fd9280->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd91d0, 0x7ffcb1fd9280 } +Shifting token 'a' (0x7ffcb1fd91d0 'a') +0x55c245342f40->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd91d0 } +0x7ffcb1fd91d0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd91d0 } Entering state 1 Stack now 0 10 10 10 1 -0x7ffc25eb0c40->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40 } +0x7ffcb1fd92a0->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5620d756ef40 'a') --> $$ = nterm item (0x7ffc25eb0c40 'a') -0x5620d756ef40->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0c40 } -0x5620d756ef40->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0c40 } -0x7ffc25eb0c40->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0c40 } + $1 = token 'a' (0x55c245342f40 'a') +-> $$ = nterm item (0x7ffcb1fd92a0 'a') +0x55c245342f40->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd92a0 } +0x55c245342f40->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd92a0 } +0x7ffcb1fd92a0->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd92a0 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffc25eb0b4f->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40 } -0x7ffc25eb0c20->Object::Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0b4f } -0x7ffc25eb0b4f->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0b4f, 0x7ffc25eb0c20 } -Next token is token 'p' (0x7ffc25eb0c20 'p'Exception caught: cleaning lookahead and stack -0x5620d756ef40->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x5620d756ef40, 0x7ffc25eb0c20 } -0x5620d756ef20->Object::~Object { 0x5620d756eee0, 0x5620d756ef00, 0x5620d756ef20, 0x7ffc25eb0c20 } -0x5620d756ef00->Object::~Object { 0x5620d756eee0, 
0x5620d756ef00, 0x7ffc25eb0c20 } -0x5620d756eee0->Object::~Object { 0x5620d756eee0, 0x7ffc25eb0c20 } -0x7ffc25eb0c20->Object::~Object { 0x7ffc25eb0c20 } +0x7ffcb1fd91af->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40 } +0x7ffcb1fd9280->Object::Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd91af } +0x7ffcb1fd91af->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd91af, 0x7ffcb1fd9280 } +Next token is token 'p' (0x7ffcb1fd9280 'p'Exception caught: cleaning lookahead and stack +0x55c245342f40->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x55c245342f40, 0x7ffcb1fd9280 } +0x55c245342f20->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x55c245342f20, 0x7ffcb1fd9280 } +0x55c245342f00->Object::~Object { 0x55c245342ee0, 0x55c245342f00, 0x7ffcb1fd9280 } +0x55c245342ee0->Object::~Object { 0x55c245342ee0, 0x7ffcb1fd9280 } +0x7ffcb1fd9280->Object::~Object { 0x7ffcb1fd9280 } exception caught: printer end { } ./c++.at:1363: grep '^exception caught: printer$' stderr -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaas stdout: exception caught: printer +stderr: ./c++.at:1363: $PREPARSER ./input aaaae +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -======== Testing with C++ standard flags: '' exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1363: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaT -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR -stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1680: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./glr-regression.at:1104: $PREPARSER ./glr-regr10 -stderr: -./glr-regression.at:1104: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -stdout: -746. glr-regression.at:1104: ok -./glr-regression.at:1038: $PREPARSER ./glr-regr9 -stderr: -stderr: -stdout: -memory exhausted -./glr-regression.at:1038: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1445: $PREPARSER ./glr-regr13 -stderr: -./glr-regression.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -743. glr-regression.at:1038: ok - -753. glr-regression.at:1445: ok - - -759. glr-regression.at:1785: testing Leaked semantic values when reporting ambiguity: glr.c ... -./glr-regression.at:1785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.c glr-regr15.y -761. glr-regression.at:1787: testing Leaked semantic values when reporting ambiguity: glr2.cc ... -760. glr-regression.at:1786: testing Leaked semantic values when reporting ambiguity: glr.cc ... 
-./glr-regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y -./glr-regression.at:1786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y -stderr: -stdout: -./glr-regression.at:1446: $PREPARSER ./glr-regr13 -./glr-regression.at:1785: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr15 glr-regr15.c $LIBS -stderr: -./glr-regression.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -754. glr-regression.at:1446: ok -./glr-regression.at:1786: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS - -./glr-regression.at:1787: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -762. glr-regression.at:1860: testing Leaked lookahead after nondeterministic parse syntax error: glr.c ... -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.c glr-regr16.y -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./glr-regression.at:1678: $PREPARSER ./glr-regr14 -stderr: -./glr-regression.at:1678: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1860: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr16 glr-regr16.c $LIBS -756. glr-regression.at:1678: ok - -763. glr-regression.at:1861: testing Leaked lookahead after nondeterministic parse syntax error: glr.cc ... -./glr-regression.at:1861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y -./glr-regression.at:1861: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS -stderr: -stderr: -stdout: -stdout: -./c++.at:1361: $PREPARSER ./input aaaas -./glr-regression.at:1860: $PREPARSER ./glr-regr16 -stderr: -stderr: -exception caught: reduction -syntax error -./glr-regression.at:1860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaal -762. glr-regression.at:1860: ok stderr: +754. glr-regression.at:1446: testing Incorrect lookahead during deterministic GLR: glr.cc ... 
+./glr-regression.at:1446: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y exception caught: yylex ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaE +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input i - stderr: exception caught: initial-action ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaT stderr: ./c++.at:1361: $PREPARSER ./input aaaap -stdout: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1446: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS stderr: -./glr-regression.at:1312: $PREPARSER ./glr-regr12 +./c++.at:1363: $PREPARSER ./input aaaaR ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./glr-regression.at:1312: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input --debug aaaap -./glr-regression.at:1679: $PREPARSER ./glr-regr14 +======== Testing with C++ standard flags: '' stderr: -752. glr-regression.at:1312: ok stderr: +./glr-regression.at:1175: $PREPARSER ./glr-regr11 +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Starting parse Entering state 0 Stack now 0 Reading a token -0x557e8d040b40->Object::Object { } -Next token is token 'a' (0x557e8d040b40 'a') -Shifting token 'a' (0x557e8d040b40 'a') +0x555569b57b40->Object::Object { } +Next token is token 'a' (0x555569b57b40 'a') +Shifting token 'a' (0x555569b57b40 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557e8d040b40 'a') --> $$ = nterm item (0x557e8d040b40 'a') + $1 = token 'a' (0x555569b57b40 'a') +-> $$ = nterm item (0x555569b57b40 'a') Entering state 10 Stack now 0 10 Reading a token -0x557e8d040b90->Object::Object { 0x557e8d040b40 } -Next token is token 'a' (0x557e8d040b90 'a') -Shifting token 'a' (0x557e8d040b90 'a') +0x555569b57b90->Object::Object { 0x555569b57b40 } +Next token is token 'a' (0x555569b57b90 'a') +Shifting token 'a' (0x555569b57b90 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557e8d040b90 'a') --> $$ = nterm item (0x557e8d040b90 'a') + $1 = token 'a' (0x555569b57b90 'a') +-> $$ = nterm item (0x555569b57b90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x557e8d040be0->Object::Object { 0x557e8d040b40, 0x557e8d040b90 } -Next token is token 'a' (0x557e8d040be0 'a') -Shifting token 'a' (0x557e8d040be0 'a') +0x555569b57be0->Object::Object { 0x555569b57b40, 0x555569b57b90 } +Next token is token 'a' (0x555569b57be0 'a') +Shifting token 'a' (0x555569b57be0 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557e8d040be0 'a') --> $$ = nterm item (0x557e8d040be0 'a') + $1 = token 'a' (0x555569b57be0 'a') +-> $$ = nterm item (0x555569b57be0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x557e8d040c30->Object::Object { 0x557e8d040b40, 0x557e8d040b90, 0x557e8d040be0 } -Next token is token 'a' (0x557e8d040c30 'a') -Shifting token 'a' (0x557e8d040c30 'a') +0x555569b57c30->Object::Object { 
0x555569b57b40, 0x555569b57b90, 0x555569b57be0 } +Next token is token 'a' (0x555569b57c30 'a') +Shifting token 'a' (0x555569b57c30 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557e8d040c30 'a') --> $$ = nterm item (0x557e8d040c30 'a') + $1 = token 'a' (0x555569b57c30 'a') +-> $$ = nterm item (0x555569b57c30 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x557e8d040c80->Object::Object { 0x557e8d040b40, 0x557e8d040b90, 0x557e8d040be0, 0x557e8d040c30 } -Next token is token 'p' (0x557e8d040c80 'p'Exception caught: cleaning lookahead and stack -0x557e8d040c80->Object::~Object { 0x557e8d040b40, 0x557e8d040b90, 0x557e8d040be0, 0x557e8d040c30, 0x557e8d040c80 } -0x557e8d040c30->Object::~Object { 0x557e8d040b40, 0x557e8d040b90, 0x557e8d040be0, 0x557e8d040c30 } -0x557e8d040be0->Object::~Object { 0x557e8d040b40, 0x557e8d040b90, 0x557e8d040be0 } -0x557e8d040b90->Object::~Object { 0x557e8d040b40, 0x557e8d040b90 } -0x557e8d040b40->Object::~Object { 0x557e8d040b40 } +0x555569b57c80->Object::Object { 0x555569b57b40, 0x555569b57b90, 0x555569b57be0, 0x555569b57c30 } +Next token is token 'p' (0x555569b57c80 'p'Exception caught: cleaning lookahead and stack +0x555569b57c80->Object::~Object { 0x555569b57b40, 0x555569b57b90, 0x555569b57be0, 0x555569b57c30, 0x555569b57c80 } +0x555569b57c30->Object::~Object { 0x555569b57b40, 0x555569b57b90, 0x555569b57be0, 0x555569b57c30 } +0x555569b57be0->Object::~Object { 0x555569b57b40, 0x555569b57b90, 0x555569b57be0 } +0x555569b57b90->Object::~Object { 0x555569b57b40, 0x555569b57b90 } +0x555569b57b40->Object::~Object { 0x555569b57b40 } exception caught: printer end { } -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -764. glr-regression.at:1862: testing Leaked lookahead after nondeterministic parse syntax error: glr2.cc ... -./glr-regression.at:1862: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y stdout: -./glr-regression.at:1679: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1175: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:946: $PREPARSER ./glr-regr8 +748. 
glr-regression.at:1175: ok +stderr: stderr: -./glr-regression.at:1176: $PREPARSER ./glr-regr11 Starting parse Entering state 0 Stack now 0 Reading a token -0x557e8d040b40->Object::Object { } -Next token is token 'a' (0x557e8d040b40 'a') -Shifting token 'a' (0x557e8d040b40 'a') +0x555569b57b40->Object::Object { } +Next token is token 'a' (0x555569b57b40 'a') +Shifting token 'a' (0x555569b57b40 'a') Entering state 1 Stack now 0 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557e8d040b40 'a') --> $$ = nterm item (0x557e8d040b40 'a') + $1 = token 'a' (0x555569b57b40 'a') +-> $$ = nterm item (0x555569b57b40 'a') Entering state 10 Stack now 0 10 Reading a token -0x557e8d040b90->Object::Object { 0x557e8d040b40 } -Next token is token 'a' (0x557e8d040b90 'a') -Shifting token 'a' (0x557e8d040b90 'a') +0x555569b57b90->Object::Object { 0x555569b57b40 } +Next token is token 'a' (0x555569b57b90 'a') +Shifting token 'a' (0x555569b57b90 'a') Entering state 1 Stack now 0 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557e8d040b90 'a') --> $$ = nterm item (0x557e8d040b90 'a') + $1 = token 'a' (0x555569b57b90 'a') +-> $$ = nterm item (0x555569b57b90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x557e8d040be0->Object::Object { 0x557e8d040b40, 0x557e8d040b90 } -Next token is token 'a' (0x557e8d040be0 'a') -Shifting token 'a' (0x557e8d040be0 'a') +0x555569b57be0->Object::Object { 0x555569b57b40, 0x555569b57b90 } +Next token is token 'a' (0x555569b57be0 'a') +Shifting token 'a' (0x555569b57be0 'a') Entering state 1 Stack now 0 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557e8d040be0 'a') --> $$ = nterm item (0x557e8d040be0 'a') + $1 = token 'a' (0x555569b57be0 'a') +-> $$ = nterm item (0x555569b57be0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x557e8d040c30->Object::Object { 0x557e8d040b40, 0x557e8d040b90, 0x557e8d040be0 } -Next token is token 'a' (0x557e8d040c30 'a') -Shifting token 'a' (0x557e8d040c30 'a') +0x555569b57c30->Object::Object { 0x555569b57b40, 0x555569b57b90, 0x555569b57be0 } +Next token is token 'a' (0x555569b57c30 'a') +Shifting token 'a' (0x555569b57c30 'a') Entering state 1 Stack now 0 10 10 10 1 Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x557e8d040c30 'a') --> $$ = nterm item (0x557e8d040c30 'a') + $1 = token 'a' (0x555569b57c30 'a') +-> $$ = nterm item (0x555569b57c30 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x557e8d040c80->Object::Object { 0x557e8d040b40, 0x557e8d040b90, 0x557e8d040be0, 0x557e8d040c30 } -Next token is token 'p' (0x557e8d040c80 'p'Exception caught: cleaning lookahead and stack -0x557e8d040c80->Object::~Object { 0x557e8d040b40, 0x557e8d040b90, 0x557e8d040be0, 0x557e8d040c30, 0x557e8d040c80 } -0x557e8d040c30->Object::~Object { 0x557e8d040b40, 0x557e8d040b90, 0x557e8d040be0, 0x557e8d040c30 } -0x557e8d040be0->Object::~Object { 0x557e8d040b40, 0x557e8d040b90, 0x557e8d040be0 } -0x557e8d040b90->Object::~Object { 0x557e8d040b40, 0x557e8d040b90 } -0x557e8d040b40->Object::~Object { 0x557e8d040b40 } +0x555569b57c80->Object::Object { 0x555569b57b40, 0x555569b57b90, 0x555569b57be0, 0x555569b57c30 } +Next token is token 'p' (0x555569b57c80 'p'Exception caught: cleaning lookahead and stack +0x555569b57c80->Object::~Object { 0x555569b57b40, 0x555569b57b90, 0x555569b57be0, 0x555569b57c30, 0x555569b57c80 } +0x555569b57c30->Object::~Object { 0x555569b57b40, 0x555569b57b90, 0x555569b57be0, 0x555569b57c30 } +0x555569b57be0->Object::~Object { 0x555569b57b40, 0x555569b57b90, 
0x555569b57be0 } +0x555569b57b90->Object::~Object { 0x555569b57b40, 0x555569b57b90 } +0x555569b57b40->Object::~Object { 0x555569b57b40 } exception caught: printer end { } -stderr: ./c++.at:1361: grep '^exception caught: printer$' stderr -757. glr-regression.at:1679: ok -stdout: - -stderr: stdout: exception caught: printer ./c++.at:1361: $PREPARSER ./input aaaae -./glr-regression.at:1176: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1785: $PREPARSER ./glr-regr15 -stderr: +./glr-regression.at:946: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: exception caught: syntax error ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -749. glr-regression.at:1176: ok -Ambiguity detected. -Option 1, - ambiguity -> - ambiguity1 -> - -Option 2, - ambiguity -> - ambiguity2 -> - -syntax is ambiguous -./glr-regression.at:1785: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +740. glr-regression.at:946: ok ./c++.at:1361: $PREPARSER ./input aaaaE -759. glr-regression.at:1785: stderr: - ok +stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr - ./c++.at:1361: $PREPARSER ./input aaaaT stderr: -./glr-regression.at:1862: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1361: $PREPARSER ./input aaaaR stderr: ./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -765. glr-regression.at:1964: testing Uninitialized location when reporting ambiguity: glr.c api.pure ... -./glr-regression.at:1964: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.c glr-regr17.y +756. glr-regression.at:1678: testing Incorrect lookahead during nondeterministic GLR: glr.c ... +./glr-regression.at:1678: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.c glr-regr14.y +755. glr-regression.at:1447: testing Incorrect lookahead during deterministic GLR: glr2.cc ... ======== Testing with C++ standard flags: '' ./c++.at:1361: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -767. glr-regression.at:1966: testing Uninitialized location when reporting ambiguity: glr2.cc ... -766. glr-regression.at:1965: testing Uninitialized location when reporting ambiguity: glr.cc ... -./glr-regression.at:1966: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y -./glr-regression.at:1965: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y -768. glr-regression.at:2035: testing Missed %merge type warnings when LHS type is declared later: glr.c ... -./glr-regression.at:2035: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y -768. 
glr-regression.at:2035: ok -./glr-regression.at:1966: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS - -./glr-regression.at:1964: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr17 glr-regr17.c $LIBS -./glr-regression.at:1965: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS -769. glr-regression.at:2036: testing Missed %merge type warnings when LHS type is declared later: glr.cc ... -./glr-regression.at:2036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y -769. glr-regression.at:2036: ok +./glr-regression.at:1447: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr13.cc glr-regr13.y +./glr-regression.at:1678: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr14 glr-regr14.c $LIBS +./glr-regression.at:1447: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr13 glr-regr13.cc $LIBS stderr: stdout: -./c++.at:858: $PREPARSER ./input +./c++.at:857: $PREPARSER ./input stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:857: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:858: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:1360: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaap +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input --debug aaaap +stderr: +stderr: +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x556dbd266b40->Object::Object { } +Next token is token 'a' (0x556dbd266b40 'a') +Shifting token 'a' (0x556dbd266b40 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556dbd266b40 'a') +-> $$ = nterm item (0x556dbd266b40 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x556dbd266b90->Object::Object { 0x556dbd266b40 } +Next token is token 'a' (0x556dbd266b90 'a') +Shifting token 'a' (0x556dbd266b90 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556dbd266b90 'a') +-> $$ = nterm item (0x556dbd266b90 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x556dbd266be0->Object::Object { 0x556dbd266b40, 0x556dbd266b90 } +Next token is token 'a' (0x556dbd266be0 'a') +Shifting token 'a' (0x556dbd266be0 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556dbd266be0 'a') +-> $$ = nterm item (0x556dbd266be0 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x556dbd266c30->Object::Object { 0x556dbd266b40, 0x556dbd266b90, 0x556dbd266be0 } +Next token is token 'a' (0x556dbd266c30 
'a') +Shifting token 'a' (0x556dbd266c30 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556dbd266c30 'a') +-> $$ = nterm item (0x556dbd266c30 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x556dbd266c80->Object::Object { 0x556dbd266b40, 0x556dbd266b90, 0x556dbd266be0, 0x556dbd266c30 } +Next token is token 'p' (0x556dbd266c80 'p'Exception caught: cleaning lookahead and stack +0x556dbd266c80->Object::~Object { 0x556dbd266b40, 0x556dbd266b90, 0x556dbd266be0, 0x556dbd266c30, 0x556dbd266c80 } +0x556dbd266c30->Object::~Object { 0x556dbd266b40, 0x556dbd266b90, 0x556dbd266be0, 0x556dbd266c30 } +0x556dbd266be0->Object::~Object { 0x556dbd266b40, 0x556dbd266b90, 0x556dbd266be0 } +0x556dbd266b90->Object::~Object { 0x556dbd266b40, 0x556dbd266b90 } +0x556dbd266b40->Object::~Object { 0x556dbd266b40 } +exception caught: printer +end { } +stdout: +./glr-regression.at:1038: $PREPARSER ./glr-regr9 +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stderr: +memory exhausted +./glr-regression.at:1038: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +Starting parse +Entering state 0 +Stack now 0 +Reading a token +0x556dbd266b40->Object::Object { } +Next token is token 'a' (0x556dbd266b40 'a') +Shifting token 'a' (0x556dbd266b40 'a') +Entering state 2 +Stack now 0 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556dbd266b40 'a') +-> $$ = nterm item (0x556dbd266b40 'a') +Entering state 11 +Stack now 0 11 +Reading a token +0x556dbd266b90->Object::Object { 0x556dbd266b40 } +Next token is token 'a' (0x556dbd266b90 'a') +Shifting token 'a' (0x556dbd266b90 'a') +Entering state 2 +Stack now 0 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556dbd266b90 'a') +-> $$ = nterm item (0x556dbd266b90 'a') +Entering state 11 +Stack now 0 11 11 +Reading a token +0x556dbd266be0->Object::Object { 0x556dbd266b40, 0x556dbd266b90 } +Next token is token 'a' (0x556dbd266be0 'a') +Shifting token 'a' (0x556dbd266be0 'a') +Entering state 2 +Stack now 0 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556dbd266be0 'a') +-> $$ = nterm item (0x556dbd266be0 'a') +Entering state 11 +Stack now 0 11 11 11 +Reading a token +0x556dbd266c30->Object::Object { 0x556dbd266b40, 0x556dbd266b90, 0x556dbd266be0 } +Next token is token 'a' (0x556dbd266c30 'a') +Shifting token 'a' (0x556dbd266c30 'a') +Entering state 2 +Stack now 0 11 11 11 2 +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x556dbd266c30 'a') +-> $$ = nterm item (0x556dbd266c30 'a') +Entering state 11 +Stack now 0 11 11 11 11 +Reading a token +0x556dbd266c80->Object::Object { 0x556dbd266b40, 0x556dbd266b90, 0x556dbd266be0, 0x556dbd266c30 } +Next token is token 'p' (0x556dbd266c80 'p'Exception caught: cleaning lookahead and stack +0x556dbd266c80->Object::~Object { 0x556dbd266b40, 0x556dbd266b90, 0x556dbd266be0, 0x556dbd266c30, 0x556dbd266c80 } +0x556dbd266c30->Object::~Object { 0x556dbd266b40, 0x556dbd266b90, 0x556dbd266be0, 0x556dbd266c30 } +0x556dbd266be0->Object::~Object { 0x556dbd266b40, 0x556dbd266b90, 0x556dbd266be0 } +0x556dbd266b90->Object::~Object { 0x556dbd266b40, 0x556dbd266b90 } +0x556dbd266b40->Object::~Object { 0x556dbd266b40 } +exception caught: printer +end { } +./c++.at:1360: grep '^exception caught: printer$' stderr +stdout: +exception caught: printer +743. 
glr-regression.at:1038: ok +./c++.at:1360: $PREPARSER ./input aaaae +stderr: +exception caught: syntax error +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./glr-regression.at:1311: $PREPARSER ./glr-regr12 +stderr: +./glr-regression.at:1311: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr + +./c++.at:1360: $PREPARSER ./input aaaaE +751. glr-regression.at:1311: ok +stderr: +exception caught: syntax error, unexpected end of file, expecting 'a' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaT +stderr: +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1360: $PREPARSER ./input aaaaR stderr: -======== Testing with C++ standard flags: '' +./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +685. c++.at:1360: ok stdout: -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./c++.at:1066: $PREPARSER ./input < in +./glr-regression.at:1445: $PREPARSER ./glr-regr13 stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -770. glr-regression.at:2037: testing Missed %merge type warnings when LHS type is declared later: glr2.cc ... -./c++.at:1066: $PREPARSER ./input < in -./glr-regression.at:2037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +stdout: +./glr-regression.at:1104: $PREPARSER ./glr-regr10 +./glr-regression.at:1445: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -770. glr-regression.at:2037: ok -======== Testing with C++ standard flags: '' -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./glr-regression.at:1104: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +753. glr-regression.at:1445: ok + +746. glr-regression.at:1104: ok +757. glr-regression.at:1679: testing Incorrect lookahead during nondeterministic GLR: glr.cc ... +./glr-regression.at:1679: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y +758. glr-regression.at:1680: testing Incorrect lookahead during nondeterministic GLR: glr2.cc ... +./glr-regression.at:1680: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr14.cc glr-regr14.y stderr: stdout: -./c++.at:1066: ./check -771. glr-regression.at:2149: testing Ambiguity reports: glr.c ... 
-./glr-regression.at:2149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS -./glr-regression.at:2149: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS + +./c++.at:1066: $PREPARSER ./input < in +stderr: stderr: stdout: +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaas stderr: exception caught: reduction ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $PREPARSER ./input < in ./c++.at:1362: $PREPARSER ./input aaaal stderr: exception caught: yylex ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +759. glr-regression.at:1785: testing Leaked semantic values when reporting ambiguity: glr.c ... +./glr-regression.at:1785: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.c glr-regr15.y ./c++.at:1362: $PREPARSER ./input i stderr: exception caught: initial-action ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +error: invalid expression +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaap stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +761. glr-regression.at:1787: testing Leaked semantic values when reporting ambiguity: glr2.cc ... +./glr-regression.at:1787: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y +./c++.at:1066: $PREPARSER ./input < in ./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7fffeb769397->Object::Object { } -0x7fffeb769420->Object::Object { 0x7fffeb769397 } -0x7fffeb769397->Object::~Object { 0x7fffeb769397, 0x7fffeb769420 } -Next token is token 'a' (0x7fffeb769420 'a') -0x7fffeb769370->Object::Object { 0x7fffeb769420 } -0x7fffeb769420->Object::~Object { 0x7fffeb769370, 0x7fffeb769420 } -Shifting token 'a' (0x7fffeb769370 'a') -0x5649078b3ee0->Object::Object { 0x7fffeb769370 } -0x7fffeb769370->Object::~Object { 0x5649078b3ee0, 0x7fffeb769370 } +0x7ffeebc7cd17->Object::Object { } +0x7ffeebc7cda0->Object::Object { 0x7ffeebc7cd17 } +0x7ffeebc7cd17->Object::~Object { 0x7ffeebc7cd17, 0x7ffeebc7cda0 } +Next token is token 'a' (0x7ffeebc7cda0 'a') +0x7ffeebc7ccf0->Object::Object { 0x7ffeebc7cda0 } +0x7ffeebc7cda0->Object::~Object { 0x7ffeebc7ccf0, 0x7ffeebc7cda0 } +Shifting token 'a' (0x7ffeebc7ccf0 'a') +0x5602c3111ee0->Object::Object { 0x7ffeebc7ccf0 } +0x7ffeebc7ccf0->Object::~Object { 0x5602c3111ee0, 0x7ffeebc7ccf0 } Entering state 2 Stack now 0 2 -0x7fffeb769440->Object::Object { 0x5649078b3ee0 } +0x7ffeebc7cdc0->Object::Object { 0x5602c3111ee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5649078b3ee0 'a') --> $$ = nterm item (0x7fffeb769440 'a') -0x5649078b3ee0->Object::~Object { 0x5649078b3ee0, 0x7fffeb769440 } -0x5649078b3ee0->Object::Object { 0x7fffeb769440 } -0x7fffeb769440->Object::~Object { 0x5649078b3ee0, 0x7fffeb769440 } + $1 = token 'a' (0x5602c3111ee0 'a') +-> $$ = nterm item (0x7ffeebc7cdc0 'a') +0x5602c3111ee0->Object::~Object { 0x5602c3111ee0, 
0x7ffeebc7cdc0 } +0x5602c3111ee0->Object::Object { 0x7ffeebc7cdc0 } +0x7ffeebc7cdc0->Object::~Object { 0x5602c3111ee0, 0x7ffeebc7cdc0 } Entering state 11 Stack now 0 11 Reading a token -0x7fffeb769397->Object::Object { 0x5649078b3ee0 } -0x7fffeb769420->Object::Object { 0x5649078b3ee0, 0x7fffeb769397 } -0x7fffeb769397->Object::~Object { 0x5649078b3ee0, 0x7fffeb769397, 0x7fffeb769420 } -Next token is token 'a' (0x7fffeb769420 'a') -0x7fffeb769370->Object::Object { 0x5649078b3ee0, 0x7fffeb769420 } -0x7fffeb769420->Object::~Object { 0x5649078b3ee0, 0x7fffeb769370, 0x7fffeb769420 } -Shifting token 'a' (0x7fffeb769370 'a') -0x5649078b3f00->Object::Object { 0x5649078b3ee0, 0x7fffeb769370 } -0x7fffeb769370->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769370 } +0x7ffeebc7cd17->Object::Object { 0x5602c3111ee0 } +0x7ffeebc7cda0->Object::Object { 0x5602c3111ee0, 0x7ffeebc7cd17 } +0x7ffeebc7cd17->Object::~Object { 0x5602c3111ee0, 0x7ffeebc7cd17, 0x7ffeebc7cda0 } +Next token is token 'a' (0x7ffeebc7cda0 'a') +0x7ffeebc7ccf0->Object::Object { 0x5602c3111ee0, 0x7ffeebc7cda0 } +0x7ffeebc7cda0->Object::~Object { 0x5602c3111ee0, 0x7ffeebc7ccf0, 0x7ffeebc7cda0 } +Shifting token 'a' (0x7ffeebc7ccf0 'a') +0x5602c3111f00->Object::Object { 0x5602c3111ee0, 0x7ffeebc7ccf0 } +0x7ffeebc7ccf0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7ccf0 } Entering state 2 Stack now 0 11 2 -0x7fffeb769440->Object::Object { 0x5649078b3ee0, 0x5649078b3f00 } +0x7ffeebc7cdc0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5649078b3f00 'a') --> $$ = nterm item (0x7fffeb769440 'a') -0x5649078b3f00->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769440 } -0x5649078b3f00->Object::Object { 0x5649078b3ee0, 0x7fffeb769440 } -0x7fffeb769440->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769440 } + $1 = token 'a' (0x5602c3111f00 'a') +-> $$ = nterm item (0x7ffeebc7cdc0 'a') +0x5602c3111f00->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cdc0 } +0x5602c3111f00->Object::Object { 0x5602c3111ee0, 0x7ffeebc7cdc0 } +0x7ffeebc7cdc0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cdc0 } Entering state 11 Stack now 0 11 11 Reading a token -0x7fffeb769397->Object::Object { 0x5649078b3ee0, 0x5649078b3f00 } -0x7fffeb769420->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769397 } -0x7fffeb769397->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769397, 0x7fffeb769420 } -Next token is token 'a' (0x7fffeb769420 'a') -0x7fffeb769370->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769420 } -0x7fffeb769420->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769370, 0x7fffeb769420 } -Shifting token 'a' (0x7fffeb769370 'a') -0x5649078b3f20->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769370 } -0x7fffeb769370->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769370 } +0x7ffeebc7cd17->Object::Object { 0x5602c3111ee0, 0x5602c3111f00 } +0x7ffeebc7cda0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cd17 } +0x7ffeebc7cd17->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cd17, 0x7ffeebc7cda0 } +Next token is token 'a' (0x7ffeebc7cda0 'a') +0x7ffeebc7ccf0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cda0 } +0x7ffeebc7cda0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7ccf0, 0x7ffeebc7cda0 } +Shifting token 'a' (0x7ffeebc7ccf0 'a') +0x5602c3111f20->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 
0x7ffeebc7ccf0 } +0x7ffeebc7ccf0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7ccf0 } Entering state 2 Stack now 0 11 11 2 -0x7fffeb769440->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20 } +0x7ffeebc7cdc0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5649078b3f20 'a') --> $$ = nterm item (0x7fffeb769440 'a') -0x5649078b3f20->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769440 } -0x5649078b3f20->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769440 } -0x7fffeb769440->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769440 } + $1 = token 'a' (0x5602c3111f20 'a') +-> $$ = nterm item (0x7ffeebc7cdc0 'a') +0x5602c3111f20->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cdc0 } +0x5602c3111f20->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cdc0 } +0x7ffeebc7cdc0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cdc0 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7fffeb769397->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20 } -0x7fffeb769420->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769397 } -0x7fffeb769397->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769397, 0x7fffeb769420 } -Next token is token 'a' (0x7fffeb769420 'a') -0x7fffeb769370->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769420 } -0x7fffeb769420->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769370, 0x7fffeb769420 } -Shifting token 'a' (0x7fffeb769370 'a') -0x5649078b3f40->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769370 } -0x7fffeb769370->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769370 } +0x7ffeebc7cd17->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20 } +0x7ffeebc7cda0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cd17 } +0x7ffeebc7cd17->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cd17, 0x7ffeebc7cda0 } +Next token is token 'a' (0x7ffeebc7cda0 'a') +0x7ffeebc7ccf0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cda0 } +0x7ffeebc7cda0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7ccf0, 0x7ffeebc7cda0 } +Shifting token 'a' (0x7ffeebc7ccf0 'a') +0x5602c3111f40->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7ccf0 } +0x7ffeebc7ccf0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7ccf0 } Entering state 2 Stack now 0 11 11 11 2 -0x7fffeb769440->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40 } +0x7ffeebc7cdc0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5649078b3f40 'a') --> $$ = nterm item (0x7fffeb769440 'a') -0x5649078b3f40->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769440 } -0x5649078b3f40->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769440 } -0x7fffeb769440->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769440 } + $1 = token 'a' (0x5602c3111f40 'a') +-> $$ = nterm item (0x7ffeebc7cdc0 'a') 
+0x5602c3111f40->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7cdc0 } +0x5602c3111f40->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cdc0 } +0x7ffeebc7cdc0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7cdc0 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7fffeb769397->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40 } -0x7fffeb769420->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769397 } -0x7fffeb769397->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769397, 0x7fffeb769420 } -Next token is token 'p' (0x7fffeb769420 'p'Exception caught: cleaning lookahead and stack -0x5649078b3f40->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769420 } -0x5649078b3f20->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769420 } -0x5649078b3f00->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769420 } -0x5649078b3ee0->Object::~Object { 0x5649078b3ee0, 0x7fffeb769420 } -0x7fffeb769420->Object::~Object { 0x7fffeb769420 } +0x7ffeebc7cd17->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40 } +0x7ffeebc7cda0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7cd17 } +0x7ffeebc7cd17->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7cd17, 0x7ffeebc7cda0 } +Next token is token 'p' (0x7ffeebc7cda0 'p'Exception caught: cleaning lookahead and stack +0x5602c3111f40->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7cda0 } +0x5602c3111f20->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cda0 } +0x5602c3111f00->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cda0 } +0x5602c3111ee0->Object::~Object { 0x5602c3111ee0, 0x7ffeebc7cda0 } +0x7ffeebc7cda0->Object::~Object { 0x7ffeebc7cda0 } exception caught: printer end { } ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +error: invalid character +./glr-regression.at:1785: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr15 glr-regr15.c $LIBS +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +760. glr-regression.at:1786: testing Leaked semantic values when reporting ambiguity: glr.cc ... 
+./glr-regression.at:1786: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr15.cc glr-regr15.y stderr: +stderr: +./glr-regression.at:1679: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS Starting parse Entering state 0 Stack now 0 Reading a token -0x7fffeb769397->Object::Object { } -0x7fffeb769420->Object::Object { 0x7fffeb769397 } -0x7fffeb769397->Object::~Object { 0x7fffeb769397, 0x7fffeb769420 } -Next token is token 'a' (0x7fffeb769420 'a') -0x7fffeb769370->Object::Object { 0x7fffeb769420 } -0x7fffeb769420->Object::~Object { 0x7fffeb769370, 0x7fffeb769420 } -Shifting token 'a' (0x7fffeb769370 'a') -0x5649078b3ee0->Object::Object { 0x7fffeb769370 } -0x7fffeb769370->Object::~Object { 0x5649078b3ee0, 0x7fffeb769370 } +0x7ffeebc7cd17->Object::Object { } +0x7ffeebc7cda0->Object::Object { 0x7ffeebc7cd17 } +0x7ffeebc7cd17->Object::~Object { 0x7ffeebc7cd17, 0x7ffeebc7cda0 } +Next token is token 'a' (0x7ffeebc7cda0 'a') +0x7ffeebc7ccf0->Object::Object { 0x7ffeebc7cda0 } +0x7ffeebc7cda0->Object::~Object { 0x7ffeebc7ccf0, 0x7ffeebc7cda0 } +Shifting token 'a' (0x7ffeebc7ccf0 'a') +0x5602c3111ee0->Object::Object { 0x7ffeebc7ccf0 } +0x7ffeebc7ccf0->Object::~Object { 0x5602c3111ee0, 0x7ffeebc7ccf0 } Entering state 2 Stack now 0 2 -0x7fffeb769440->Object::Object { 0x5649078b3ee0 } +0x7ffeebc7cdc0->Object::Object { 0x5602c3111ee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5649078b3ee0 'a') --> $$ = nterm item (0x7fffeb769440 'a') -0x5649078b3ee0->Object::~Object { 0x5649078b3ee0, 0x7fffeb769440 } -0x5649078b3ee0->Object::Object { 0x7fffeb769440 } -0x7fffeb769440->Object::~Object { 0x5649078b3ee0, 0x7fffeb769440 } + $1 = token 'a' (0x5602c3111ee0 'a') +-> $$ = nterm item (0x7ffeebc7cdc0 'a') +0x5602c3111ee0->Object::~Object { 0x5602c3111ee0, 0x7ffeebc7cdc0 } +0x5602c3111ee0->Object::Object { 0x7ffeebc7cdc0 } +0x7ffeebc7cdc0->Object::~Object { 0x5602c3111ee0, 0x7ffeebc7cdc0 } Entering state 11 Stack now 0 11 Reading a token -0x7fffeb769397->Object::Object { 0x5649078b3ee0 } -0x7fffeb769420->Object::Object { 0x5649078b3ee0, 0x7fffeb769397 } -0x7fffeb769397->Object::~Object { 0x5649078b3ee0, 0x7fffeb769397, 0x7fffeb769420 } -Next token is token 'a' (0x7fffeb769420 'a') -0x7fffeb769370->Object::Object { 0x5649078b3ee0, 0x7fffeb769420 } -0x7fffeb769420->Object::~Object { 0x5649078b3ee0, 0x7fffeb769370, 0x7fffeb769420 } -Shifting token 'a' (0x7fffeb769370 'a') -0x5649078b3f00->Object::Object { 0x5649078b3ee0, 0x7fffeb769370 } -0x7fffeb769370->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769370 } +0x7ffeebc7cd17->Object::Object { 0x5602c3111ee0 } +0x7ffeebc7cda0->Object::Object { 0x5602c3111ee0, 0x7ffeebc7cd17 } +0x7ffeebc7cd17->Object::~Object { 0x5602c3111ee0, 0x7ffeebc7cd17, 0x7ffeebc7cda0 } +Next token is token 'a' (0x7ffeebc7cda0 'a') +0x7ffeebc7ccf0->Object::Object { 0x5602c3111ee0, 0x7ffeebc7cda0 } +0x7ffeebc7cda0->Object::~Object { 0x5602c3111ee0, 0x7ffeebc7ccf0, 0x7ffeebc7cda0 } +Shifting token 'a' (0x7ffeebc7ccf0 'a') +0x5602c3111f00->Object::Object { 0x5602c3111ee0, 0x7ffeebc7ccf0 } +0x7ffeebc7ccf0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7ccf0 } Entering state 2 Stack now 0 11 2 -0x7fffeb769440->Object::Object { 0x5649078b3ee0, 0x5649078b3f00 } +0x7ffeebc7cdc0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5649078b3f00 'a') --> $$ = nterm item (0x7fffeb769440 'a') -0x5649078b3f00->Object::~Object { 
0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769440 } -0x5649078b3f00->Object::Object { 0x5649078b3ee0, 0x7fffeb769440 } -0x7fffeb769440->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769440 } + $1 = token 'a' (0x5602c3111f00 'a') +-> $$ = nterm item (0x7ffeebc7cdc0 'a') +0x5602c3111f00->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cdc0 } +0x5602c3111f00->Object::Object { 0x5602c3111ee0, 0x7ffeebc7cdc0 } +0x7ffeebc7cdc0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cdc0 } Entering state 11 Stack now 0 11 11 Reading a token -0x7fffeb769397->Object::Object { 0x5649078b3ee0, 0x5649078b3f00 } -0x7fffeb769420->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769397 } -0x7fffeb769397->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769397, 0x7fffeb769420 } -Next token is token 'a' (0x7fffeb769420 'a') -0x7fffeb769370->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769420 } -0x7fffeb769420->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769370, 0x7fffeb769420 } -Shifting token 'a' (0x7fffeb769370 'a') -0x5649078b3f20->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769370 } -0x7fffeb769370->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769370 } +0x7ffeebc7cd17->Object::Object { 0x5602c3111ee0, 0x5602c3111f00 } +0x7ffeebc7cda0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cd17 } +0x7ffeebc7cd17->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cd17, 0x7ffeebc7cda0 } +Next token is token 'a' (0x7ffeebc7cda0 'a') +0x7ffeebc7ccf0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cda0 } +0x7ffeebc7cda0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7ccf0, 0x7ffeebc7cda0 } +Shifting token 'a' (0x7ffeebc7ccf0 'a') +0x5602c3111f20->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7ccf0 } +0x7ffeebc7ccf0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7ccf0 } Entering state 2 Stack now 0 11 11 2 -0x7fffeb769440->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20 } +0x7ffeebc7cdc0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5649078b3f20 'a') --> $$ = nterm item (0x7fffeb769440 'a') -0x5649078b3f20->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769440 } -0x5649078b3f20->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769440 } -0x7fffeb769440->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769440 } + $1 = token 'a' (0x5602c3111f20 'a') +-> $$ = nterm item (0x7ffeebc7cdc0 'a') +0x5602c3111f20->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cdc0 } +0x5602c3111f20->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cdc0 } +0x7ffeebc7cdc0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cdc0 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7fffeb769397->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20 } -0x7fffeb769420->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769397 } -0x7fffeb769397->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769397, 0x7fffeb769420 } -Next token is token 'a' (0x7fffeb769420 'a') -0x7fffeb769370->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769420 } -0x7fffeb769420->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769370, 0x7fffeb769420 } 
-Shifting token 'a' (0x7fffeb769370 'a') -0x5649078b3f40->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769370 } -0x7fffeb769370->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769370 } +0x7ffeebc7cd17->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20 } +0x7ffeebc7cda0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cd17 } +0x7ffeebc7cd17->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cd17, 0x7ffeebc7cda0 } +Next token is token 'a' (0x7ffeebc7cda0 'a') +0x7ffeebc7ccf0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cda0 } +0x7ffeebc7cda0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7ccf0, 0x7ffeebc7cda0 } +Shifting token 'a' (0x7ffeebc7ccf0 'a') +0x5602c3111f40->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7ccf0 } +0x7ffeebc7ccf0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7ccf0 } Entering state 2 Stack now 0 11 11 11 2 -0x7fffeb769440->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40 } +0x7ffeebc7cdc0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x5649078b3f40 'a') --> $$ = nterm item (0x7fffeb769440 'a') -0x5649078b3f40->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769440 } -0x5649078b3f40->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769440 } -0x7fffeb769440->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769440 } + $1 = token 'a' (0x5602c3111f40 'a') +-> $$ = nterm item (0x7ffeebc7cdc0 'a') +0x5602c3111f40->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7cdc0 } +0x5602c3111f40->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cdc0 } +0x7ffeebc7cdc0->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7cdc0 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7fffeb769397->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40 } -0x7fffeb769420->Object::Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769397 } -0x7fffeb769397->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769397, 0x7fffeb769420 } -Next token is token 'p' (0x7fffeb769420 'p'Exception caught: cleaning lookahead and stack -0x5649078b3f40->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x5649078b3f40, 0x7fffeb769420 } -0x5649078b3f20->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x5649078b3f20, 0x7fffeb769420 } -0x5649078b3f00->Object::~Object { 0x5649078b3ee0, 0x5649078b3f00, 0x7fffeb769420 } -0x5649078b3ee0->Object::~Object { 0x5649078b3ee0, 0x7fffeb769420 } -0x7fffeb769420->Object::~Object { 0x7fffeb769420 } +0x7ffeebc7cd17->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40 } +0x7ffeebc7cda0->Object::Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7cd17 } +0x7ffeebc7cd17->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7cd17, 0x7ffeebc7cda0 } +Next token is token 'p' (0x7ffeebc7cda0 'p'Exception caught: cleaning lookahead and stack +0x5602c3111f40->Object::~Object { 
0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x5602c3111f40, 0x7ffeebc7cda0 } +0x5602c3111f20->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x5602c3111f20, 0x7ffeebc7cda0 } +0x5602c3111f00->Object::~Object { 0x5602c3111ee0, 0x5602c3111f00, 0x7ffeebc7cda0 } +0x5602c3111ee0->Object::~Object { 0x5602c3111ee0, 0x7ffeebc7cda0 } +0x7ffeebc7cda0->Object::~Object { 0x7ffeebc7cda0 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr stdout: +stdout: +./glr-regression.at:1680: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr14 glr-regr14.cc $LIBS exception caught: printer ./c++.at:1362: $PREPARSER ./input aaaae +./glr-regression.at:1678: $PREPARSER ./glr-regr14 +======== Testing with C++ standard flags: '' +stderr: stderr: exception caught: syntax error ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1678: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o check check.cc $LIBS +./glr-regression.at:1787: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS ./c++.at:1362: $PREPARSER ./input aaaaE +756. glr-regression.at:1678: ok stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1786: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr15 glr-regr15.cc $LIBS ./c++.at:1362: $PREPARSER ./input aaaaT stderr: + ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaR stderr: ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ======== Testing with C++ standard flags: '' ./c++.at:1362: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +762. glr-regression.at:1860: testing Leaked lookahead after nondeterministic parse syntax error: glr.c ... +./glr-regression.at:1860: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.c glr-regr16.y stderr: stdout: -./glr-regression.at:1861: $PREPARSER ./glr-regr16 +./c++.at:1066: ./check +./c++.at:1066: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc scan.cc $LIBS +./glr-regression.at:1860: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr16 glr-regr16.c $LIBS +stderr: +stdout: +./glr-regression.at:1446: $PREPARSER ./glr-regr13 +stderr: +./glr-regression.at:1446: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +754. glr-regression.at:1446: ok + +763. glr-regression.at:1861: testing Leaked lookahead after nondeterministic parse syntax error: glr.cc ... +./glr-regression.at:1861: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y +./glr-regression.at:1861: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS +stderr: +stdout: +./glr-regression.at:1176: $PREPARSER ./glr-regr11 +stderr: +./glr-regression.at:1176: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +749. glr-regression.at:1176: ok + +764. glr-regression.at:1862: testing Leaked lookahead after nondeterministic parse syntax error: glr2.cc ... 
+./glr-regression.at:1862: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr16.cc glr-regr16.y +./glr-regression.at:1862: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr16 glr-regr16.cc $LIBS +stderr: +stdout: +./c++.at:858: $PREPARSER ./input +stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./glr-regression.at:1860: $PREPARSER ./glr-regr16 stderr: syntax error -./glr-regression.at:1861: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -763. glr-regression.at:1861: ok +./glr-regression.at:1860: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +762. glr-regression.at:1860: ok -772. glr-regression.at:2150: testing Ambiguity reports: glr.cc ... -./glr-regression.at:2150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +765. glr-regression.at:1964: testing Uninitialized location when reporting ambiguity: glr.c api.pure ... +./glr-regression.at:1964: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.c glr-regr17.y stderr: stdout: -./glr-regression.at:1786: $PREPARSER ./glr-regr15 +./glr-regression.at:1785: $PREPARSER ./glr-regr15 stderr: -./glr-regression.at:2150: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS Ambiguity detected. Option 1, ambiguity -> @@ -270054,369 +270147,252 @@ ambiguity2 -> syntax is ambiguous -./glr-regression.at:1786: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -760. glr-regression.at:1786: ok +./glr-regression.at:1785: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +759. glr-regression.at:1785: ok +./glr-regression.at:1964: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o glr-regr17 glr-regr17.c $LIBS + +766. glr-regression.at:1965: testing Uninitialized location when reporting ambiguity: glr.cc ... +./glr-regression.at:1965: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y stderr: stdout: -./c++.at:1363: $PREPARSER ./input aaaas +./glr-regression.at:1312: $PREPARSER ./glr-regr12 stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1312: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +752. glr-regression.at:1312: ok -./c++.at:1363: $PREPARSER ./input aaaal +./glr-regression.at:1965: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS +767. glr-regression.at:1966: testing Uninitialized location when reporting ambiguity: glr2.cc ... +./glr-regression.at:1966: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -rall -o glr-regr17.cc glr-regr17.y +./glr-regression.at:1966: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o glr-regr17 glr-regr17.cc $LIBS +stderr: +stdout: +./c++.at:1361: $PREPARSER ./input aaaas +stderr: +exception caught: reduction +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./glr-regression.at:1786: $PREPARSER ./glr-regr15 +./c++.at:1361: $PREPARSER ./input aaaal stderr: +stderr: +Ambiguity detected. 
+Option 1, + ambiguity -> + ambiguity1 -> + +Option 2, + ambiguity -> + ambiguity2 -> + +syntax is ambiguous +./glr-regression.at:1786: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +760. glr-regression.at:1786: ok +./c++.at:1361: $PREPARSER ./input i stderr: exception caught: initial-action -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaap -773. glr-regression.at:2151: testing Ambiguity reports: glr2.cc ... -./glr-regression.at:2151: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input --debug aaaap +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stdout: +./glr-regression.at:1679: $PREPARSER ./glr-regr14 + +./c++.at:1361: $PREPARSER ./input aaaap +stderr: +stderr: +./glr-regression.at:1679: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +757. glr-regression.at:1679: ok +./c++.at:1361: $PREPARSER ./input --debug aaaap stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7fff9e1cbe6f->Object::Object { } -0x7fff9e1cbf40->Object::Object { 0x7fff9e1cbe6f } -0x7fff9e1cbe6f->Object::~Object { 0x7fff9e1cbe6f, 0x7fff9e1cbf40 } -Next token is token 'a' (0x7fff9e1cbf40 'a') -0x7fff9e1cbe90->Object::Object { 0x7fff9e1cbf40 } -0x7fff9e1cbf40->Object::~Object { 0x7fff9e1cbe90, 0x7fff9e1cbf40 } -Shifting token 'a' (0x7fff9e1cbe90 'a') -0x55b84fcc8ee0->Object::Object { 0x7fff9e1cbe90 } -0x7fff9e1cbe90->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbe90 } +0x55d994fcbb40->Object::Object { } +Next token is token 'a' (0x55d994fcbb40 'a') +Shifting token 'a' (0x55d994fcbb40 'a') Entering state 1 Stack now 0 1 -0x7fff9e1cbf60->Object::Object { 0x55b84fcc8ee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b84fcc8ee0 'a') --> $$ = nterm item (0x7fff9e1cbf60 'a') -0x55b84fcc8ee0->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbf60 } -0x55b84fcc8ee0->Object::Object { 0x7fff9e1cbf60 } -0x7fff9e1cbf60->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbf60 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d994fcbb40 'a') +-> $$ = nterm item (0x55d994fcbb40 'a') Entering state 10 Stack now 0 10 Reading a token -0x7fff9e1cbe6f->Object::Object { 0x55b84fcc8ee0 } -0x7fff9e1cbf40->Object::Object { 0x55b84fcc8ee0, 0x7fff9e1cbe6f } -0x7fff9e1cbe6f->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbe6f, 0x7fff9e1cbf40 } -Next token is token 'a' (0x7fff9e1cbf40 'a') -0x7fff9e1cbe90->Object::Object { 0x55b84fcc8ee0, 0x7fff9e1cbf40 } -0x7fff9e1cbf40->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbe90, 0x7fff9e1cbf40 } -Shifting token 'a' (0x7fff9e1cbe90 'a') -0x55b84fcc8f00->Object::Object { 0x55b84fcc8ee0, 0x7fff9e1cbe90 } -0x7fff9e1cbe90->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbe90 } +0x55d994fcbb90->Object::Object { 0x55d994fcbb40 } +Next token is token 'a' (0x55d994fcbb90 'a') +Shifting token 'a' (0x55d994fcbb90 'a') Entering state 1 Stack now 0 10 1 -0x7fff9e1cbf60->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00 } -Reducing stack by 
rule 4 (line 142): - $1 = token 'a' (0x55b84fcc8f00 'a') --> $$ = nterm item (0x7fff9e1cbf60 'a') -0x55b84fcc8f00->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbf60 } -0x55b84fcc8f00->Object::Object { 0x55b84fcc8ee0, 0x7fff9e1cbf60 } -0x7fff9e1cbf60->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbf60 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d994fcbb90 'a') +-> $$ = nterm item (0x55d994fcbb90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x7fff9e1cbe6f->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00 } -0x7fff9e1cbf40->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbe6f } -0x7fff9e1cbe6f->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbe6f, 0x7fff9e1cbf40 } -Next token is token 'a' (0x7fff9e1cbf40 'a') -0x7fff9e1cbe90->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbf40 } -0x7fff9e1cbf40->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbe90, 0x7fff9e1cbf40 } -Shifting token 'a' (0x7fff9e1cbe90 'a') -0x55b84fcc8f20->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbe90 } -0x7fff9e1cbe90->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbe90 } +0x55d994fcbbe0->Object::Object { 0x55d994fcbb40, 0x55d994fcbb90 } +Next token is token 'a' (0x55d994fcbbe0 'a') +Shifting token 'a' (0x55d994fcbbe0 'a') Entering state 1 Stack now 0 10 10 1 -0x7fff9e1cbf60->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b84fcc8f20 'a') --> $$ = nterm item (0x7fff9e1cbf60 'a') -0x55b84fcc8f20->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbf60 } -0x55b84fcc8f20->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbf60 } -0x7fff9e1cbf60->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbf60 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d994fcbbe0 'a') +-> $$ = nterm item (0x55d994fcbbe0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x7fff9e1cbe6f->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20 } -0x7fff9e1cbf40->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbe6f } -0x7fff9e1cbe6f->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbe6f, 0x7fff9e1cbf40 } -Next token is token 'a' (0x7fff9e1cbf40 'a') -0x7fff9e1cbe90->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbf40 } -0x7fff9e1cbf40->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbe90, 0x7fff9e1cbf40 } -Shifting token 'a' (0x7fff9e1cbe90 'a') -0x55b84fcc8f40->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbe90 } -0x7fff9e1cbe90->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbe90 } +0x55d994fcbc30->Object::Object { 0x55d994fcbb40, 0x55d994fcbb90, 0x55d994fcbbe0 } +Next token is token 'a' (0x55d994fcbc30 'a') +Shifting token 'a' (0x55d994fcbc30 'a') Entering state 1 Stack now 0 10 10 10 1 -0x7fff9e1cbf60->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b84fcc8f40 'a') --> $$ = nterm item (0x7fff9e1cbf60 'a') -0x55b84fcc8f40->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbf60 } -0x55b84fcc8f40->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbf60 } -0x7fff9e1cbf60->Object::~Object { 
0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbf60 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d994fcbc30 'a') +-> $$ = nterm item (0x55d994fcbc30 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7fff9e1cbe6f->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40 } -0x7fff9e1cbf40->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbe6f } -0x7fff9e1cbe6f->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbe6f, 0x7fff9e1cbf40 } -Next token is token 'p' (0x7fff9e1cbf40 'p'Exception caught: cleaning lookahead and stack -0x55b84fcc8f40->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbf40 } -0x55b84fcc8f20->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbf40 } -0x55b84fcc8f00->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbf40 } -0x55b84fcc8ee0->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbf40 } -0x7fff9e1cbf40->Object::~Object { 0x7fff9e1cbf40 } +0x55d994fcbc80->Object::Object { 0x55d994fcbb40, 0x55d994fcbb90, 0x55d994fcbbe0, 0x55d994fcbc30 } +Next token is token 'p' (0x55d994fcbc80 'p'Exception caught: cleaning lookahead and stack +0x55d994fcbc80->Object::~Object { 0x55d994fcbb40, 0x55d994fcbb90, 0x55d994fcbbe0, 0x55d994fcbc30, 0x55d994fcbc80 } +0x55d994fcbc30->Object::~Object { 0x55d994fcbb40, 0x55d994fcbb90, 0x55d994fcbbe0, 0x55d994fcbc30 } +0x55d994fcbbe0->Object::~Object { 0x55d994fcbb40, 0x55d994fcbb90, 0x55d994fcbbe0 } +0x55d994fcbb90->Object::~Object { 0x55d994fcbb40, 0x55d994fcbb90 } +0x55d994fcbb40->Object::~Object { 0x55d994fcbb40 } exception caught: printer end { } -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: Starting parse Entering state 0 Stack now 0 Reading a token -0x7fff9e1cbe6f->Object::Object { } -0x7fff9e1cbf40->Object::Object { 0x7fff9e1cbe6f } -0x7fff9e1cbe6f->Object::~Object { 0x7fff9e1cbe6f, 0x7fff9e1cbf40 } -Next token is token 'a' (0x7fff9e1cbf40 'a') -0x7fff9e1cbe90->Object::Object { 0x7fff9e1cbf40 } -0x7fff9e1cbf40->Object::~Object { 0x7fff9e1cbe90, 0x7fff9e1cbf40 } -Shifting token 'a' (0x7fff9e1cbe90 'a') -0x55b84fcc8ee0->Object::Object { 0x7fff9e1cbe90 } -0x7fff9e1cbe90->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbe90 } +0x55d994fcbb40->Object::Object { } +Next token is token 'a' (0x55d994fcbb40 'a') +Shifting token 'a' (0x55d994fcbb40 'a') Entering state 1 Stack now 0 1 -0x7fff9e1cbf60->Object::Object { 0x55b84fcc8ee0 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b84fcc8ee0 'a') --> $$ = nterm item (0x7fff9e1cbf60 'a') -0x55b84fcc8ee0->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbf60 } -0x55b84fcc8ee0->Object::Object { 0x7fff9e1cbf60 } -0x7fff9e1cbf60->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbf60 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d994fcbb40 'a') +-> $$ = nterm item (0x55d994fcbb40 'a') Entering state 10 Stack now 0 10 Reading a token -0x7fff9e1cbe6f->Object::Object { 0x55b84fcc8ee0 } -0x7fff9e1cbf40->Object::Object { 0x55b84fcc8ee0, 0x7fff9e1cbe6f } -0x7fff9e1cbe6f->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbe6f, 0x7fff9e1cbf40 } -Next token is token 'a' (0x7fff9e1cbf40 'a') -0x7fff9e1cbe90->Object::Object { 0x55b84fcc8ee0, 0x7fff9e1cbf40 } -0x7fff9e1cbf40->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbe90, 
0x7fff9e1cbf40 } -Shifting token 'a' (0x7fff9e1cbe90 'a') -0x55b84fcc8f00->Object::Object { 0x55b84fcc8ee0, 0x7fff9e1cbe90 } -0x7fff9e1cbe90->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbe90 } +0x55d994fcbb90->Object::Object { 0x55d994fcbb40 } +Next token is token 'a' (0x55d994fcbb90 'a') +Shifting token 'a' (0x55d994fcbb90 'a') Entering state 1 Stack now 0 10 1 -0x7fff9e1cbf60->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b84fcc8f00 'a') --> $$ = nterm item (0x7fff9e1cbf60 'a') -0x55b84fcc8f00->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbf60 } -0x55b84fcc8f00->Object::Object { 0x55b84fcc8ee0, 0x7fff9e1cbf60 } -0x7fff9e1cbf60->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbf60 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d994fcbb90 'a') +-> $$ = nterm item (0x55d994fcbb90 'a') Entering state 10 Stack now 0 10 10 Reading a token -0x7fff9e1cbe6f->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00 } -0x7fff9e1cbf40->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbe6f } -0x7fff9e1cbe6f->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbe6f, 0x7fff9e1cbf40 } -Next token is token 'a' (0x7fff9e1cbf40 'a') -0x7fff9e1cbe90->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbf40 } -0x7fff9e1cbf40->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbe90, 0x7fff9e1cbf40 } -Shifting token 'a' (0x7fff9e1cbe90 'a') -0x55b84fcc8f20->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbe90 } -0x7fff9e1cbe90->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbe90 } +0x55d994fcbbe0->Object::Object { 0x55d994fcbb40, 0x55d994fcbb90 } +Next token is token 'a' (0x55d994fcbbe0 'a') +Shifting token 'a' (0x55d994fcbbe0 'a') Entering state 1 Stack now 0 10 10 1 -0x7fff9e1cbf60->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b84fcc8f20 'a') --> $$ = nterm item (0x7fff9e1cbf60 'a') -0x55b84fcc8f20->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbf60 } -0x55b84fcc8f20->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbf60 } -0x7fff9e1cbf60->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbf60 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d994fcbbe0 'a') +-> $$ = nterm item (0x55d994fcbbe0 'a') Entering state 10 Stack now 0 10 10 10 Reading a token -0x7fff9e1cbe6f->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20 } -0x7fff9e1cbf40->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbe6f } -0x7fff9e1cbe6f->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbe6f, 0x7fff9e1cbf40 } -Next token is token 'a' (0x7fff9e1cbf40 'a') -0x7fff9e1cbe90->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbf40 } -0x7fff9e1cbf40->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbe90, 0x7fff9e1cbf40 } -Shifting token 'a' (0x7fff9e1cbe90 'a') -0x55b84fcc8f40->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbe90 } -0x7fff9e1cbe90->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbe90 } +0x55d994fcbc30->Object::Object { 0x55d994fcbb40, 0x55d994fcbb90, 0x55d994fcbbe0 } +Next token is token 'a' (0x55d994fcbc30 'a') +Shifting token 'a' (0x55d994fcbc30 'a') Entering state 1 Stack now 0 10 10 10 1 
-0x7fff9e1cbf60->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40 } -Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55b84fcc8f40 'a') --> $$ = nterm item (0x7fff9e1cbf60 'a') -0x55b84fcc8f40->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbf60 } -0x55b84fcc8f40->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbf60 } -0x7fff9e1cbf60->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbf60 } +Reducing stack by rule 4 (line 147): + $1 = token 'a' (0x55d994fcbc30 'a') +-> $$ = nterm item (0x55d994fcbc30 'a') Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7fff9e1cbe6f->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40 } -0x7fff9e1cbf40->Object::Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbe6f } -0x7fff9e1cbe6f->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbe6f, 0x7fff9e1cbf40 } -Next token is token 'p' (0x7fff9e1cbf40 'p'Exception caught: cleaning lookahead and stack -0x55b84fcc8f40->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x55b84fcc8f40, 0x7fff9e1cbf40 } -0x55b84fcc8f20->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x55b84fcc8f20, 0x7fff9e1cbf40 } -0x55b84fcc8f00->Object::~Object { 0x55b84fcc8ee0, 0x55b84fcc8f00, 0x7fff9e1cbf40 } -0x55b84fcc8ee0->Object::~Object { 0x55b84fcc8ee0, 0x7fff9e1cbf40 } -0x7fff9e1cbf40->Object::~Object { 0x7fff9e1cbf40 } +0x55d994fcbc80->Object::Object { 0x55d994fcbb40, 0x55d994fcbb90, 0x55d994fcbbe0, 0x55d994fcbc30 } +Next token is token 'p' (0x55d994fcbc80 'p'Exception caught: cleaning lookahead and stack +0x55d994fcbc80->Object::~Object { 0x55d994fcbb40, 0x55d994fcbb90, 0x55d994fcbbe0, 0x55d994fcbc30, 0x55d994fcbc80 } +0x55d994fcbc30->Object::~Object { 0x55d994fcbb40, 0x55d994fcbb90, 0x55d994fcbbe0, 0x55d994fcbc30 } +0x55d994fcbbe0->Object::~Object { 0x55d994fcbb40, 0x55d994fcbb90, 0x55d994fcbbe0 } +0x55d994fcbb90->Object::~Object { 0x55d994fcbb40, 0x55d994fcbb90 } +0x55d994fcbb40->Object::~Object { 0x55d994fcbb40 } exception caught: printer end { } -./c++.at:1363: grep '^exception caught: printer$' stderr -./glr-regression.at:2151: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1361: grep '^exception caught: printer$' stderr +768. glr-regression.at:2035: testing Missed %merge type warnings when LHS type is declared later: glr.c ... +./glr-regression.at:2035: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y + stdout: exception caught: printer -./c++.at:1363: $PREPARSER ./input aaaae -./glr-regression.at:1964: $PREPARSER ./glr-regr17 +./c++.at:1361: $PREPARSER ./input aaaae +768. glr-regression.at:2035: ok stderr: exception caught: syntax error -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -Ambiguity detected. 
-Option 1, - start -> - ambig1 -> - sub_ambig2 -> - empty2 -> - 'a' - 'b' - empty1 -> - -Option 2, - start -> - ambig2 -> - sub_ambig2 -> - empty2 -> - 'a' - 'b' - empty2 -> +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -1.1-2.2: syntax is ambiguous -./glr-regression.at:1964: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaE +./c++.at:1361: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -765. glr-regression.at:1964: ok -./c++.at:1363: $PREPARSER ./input aaaaT +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaT stderr: - -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaaR +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1361: $PREPARSER ./input aaaaR stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +770. glr-regression.at:2037: testing Missed %merge type warnings when LHS type is declared later: glr2.cc ... +./glr-regression.at:2037: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y +686. c++.at:1361: ok +770. glr-regression.at:2037: ok + +769. glr-regression.at:2036: testing Missed %merge type warnings when LHS type is declared later: glr.cc ... +./glr-regression.at:2036: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; VALGRIND_OPTS="$VALGRIND_OPTS --leak-check=summary --show-reachable=no"; export VALGRIND_OPTS; bison --color=no -fno-caret -o glr-regr18.c -rall -fcaret glr-regr18.y stderr: stdout: -./glr-regression.at:1447: $PREPARSER ./glr-regr13 -stderr: -774. glr-regression.at:2229: testing Predicates: glr.c ... -./glr-regression.at:2229: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -./glr-regression.at:1447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -755. glr-regression.at:1447: ok +./glr-regression.at:1861: $PREPARSER ./glr-regr16 -./glr-regression.at:2229: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS -775. glr-regression.at:2230: testing Predicates: glr.cc ... -./glr-regression.at:2230: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: +769. 
glr-regression.at:2036: stderr: + ok stdout: -./glr-regression.at:1680: $PREPARSER ./glr-regr14 +syntax error +./glr-regression.at:1861: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:858: $PREPARSER ./input stderr: stderr: -./glr-regression.at:1680: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./glr-regression.at:2149: $PREPARSER ./input --debug -stderr: -Starting parse -Entering state 0 -Reading a token -Next token is token 'a' () -Shifting token 'a' () -Entering state 1 -Reading a token -Next token is token 'b' () -Shifting token 'b' () -Entering state 3 -Reducing stack 0 by rule 3 (line 30): - $1 = token 'b' () --> $$ = nterm b () -Entering state 4 -Reading a token -Next token is token 'c' () -Shifting token 'c' () -Entering state 6 -Reducing stack 0 by rule 4 (line 31): --> $$ = nterm d () -Entering state 7 -Reading a token -Now at end of input. -Stack 0 Entering state 7 -Now at end of input. -Splitting off stack 1 from 0. -Reduced stack 1 by rule 2 (line 28); action deferred. Now in state 2. -Stack 1 Entering state 2 -Now at end of input. -Reduced stack 0 by rule 1 (line 27); action deferred. Now in state 2. -Merging stack 0 into stack 1. -Stack 1 Entering state 2 -Now at end of input. -Removing dead stacks. -Rename stack 1 -> 0. -On stack 0, shifting token "end of file" () -Stack 0 now in state 5 -Ambiguity detected. -Option 1, - start -> - 'a' - b - 'c' - d - -Option 2, - start -> - 'a' - b - 'c' - d -syntax is ambiguous -Cleanup: popping token "end of file" () -Cleanup: popping unresolved nterm start () -Cleanup: popping nterm d () -Cleanup: popping token 'c' () -Cleanup: popping nterm b () -Cleanup: popping token 'a' () -758. glr-regression.at:1680: ok -./glr-regression.at:2230: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -./glr-regression.at:2149: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -771. glr-regression.at:2149: ok +======== Testing with C++ standard flags: '' +./c++.at:1363: $PREPARSER ./input aaaas +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +763. glr-regression.at:1861: ok +stderr: +exception caught: reduction +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +772. glr-regression.at:2150: testing Ambiguity reports: glr.cc ... +./c++.at:1363: $PREPARSER ./input aaaal +./glr-regression.at:2150: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +771. glr-regression.at:2149: testing Ambiguity reports: glr.c ... +./glr-regression.at:2149: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y -776. glr-regression.at:2231: testing Predicates: glr2.cc ... -./glr-regression.at:2231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y -./glr-regression.at:2231: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +773. glr-regression.at:2151: testing Ambiguity reports: glr2.cc ... 
+./glr-regression.at:2151: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: -./glr-regression.at:1965: $PREPARSER ./glr-regr17 +./c++.at:1363: $PREPARSER ./input i +./glr-regression.at:1964: $PREPARSER ./glr-regr17 +./glr-regression.at:2149: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +stderr: stderr: +exception caught: initial-action Ambiguity detected. Option 1, start -> @@ -270437,401 +270413,246 @@ empty2 -> 1.1-2.2: syntax is ambiguous -./glr-regression.at:1965: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -766. glr-regression.at:1965: ok -stderr: -stdout: -./c++.at:1360: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaap -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input --debug aaaap -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x55fe244a3b40->Object::Object { } -Next token is token 'a' (0x55fe244a3b40 'a') -Shifting token 'a' (0x55fe244a3b40 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fe244a3b40 'a') --> $$ = nterm item (0x55fe244a3b40 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x55fe244a3b90->Object::Object { 0x55fe244a3b40 } -Next token is token 'a' (0x55fe244a3b90 'a') -Shifting token 'a' (0x55fe244a3b90 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fe244a3b90 'a') --> $$ = nterm item (0x55fe244a3b90 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x55fe244a3be0->Object::Object { 0x55fe244a3b40, 0x55fe244a3b90 } -Next token is token 'a' (0x55fe244a3be0 'a') -Shifting token 'a' (0x55fe244a3be0 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fe244a3be0 'a') --> $$ = nterm item (0x55fe244a3be0 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x55fe244a3c30->Object::Object { 0x55fe244a3b40, 0x55fe244a3b90, 0x55fe244a3be0 } -Next token is token 'a' (0x55fe244a3c30 'a') -Shifting token 'a' (0x55fe244a3c30 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fe244a3c30 'a') --> $$ = nterm item (0x55fe244a3c30 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x55fe244a3c80->Object::Object { 0x55fe244a3b40, 0x55fe244a3b90, 0x55fe244a3be0, 0x55fe244a3c30 } -Next token is token 'p' (0x55fe244a3c80 'p'Exception caught: cleaning lookahead and stack -0x55fe244a3c80->Object::~Object { 0x55fe244a3b40, 0x55fe244a3b90, 0x55fe244a3be0, 0x55fe244a3c30, 0x55fe244a3c80 } -0x55fe244a3c30->Object::~Object { 0x55fe244a3b40, 0x55fe244a3b90, 0x55fe244a3be0, 0x55fe244a3c30 } -0x55fe244a3be0->Object::~Object { 0x55fe244a3b40, 0x55fe244a3b90, 0x55fe244a3be0 } -0x55fe244a3b90->Object::~Object { 0x55fe244a3b40, 0x55fe244a3b90 } -0x55fe244a3b40->Object::~Object { 0x55fe244a3b40 } -exception caught: printer -end { } -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge 
mismatch for summaries/d' stderr -stderr: -Starting parse -Entering state 0 -Stack now 0 -Reading a token -0x55fe244a3b40->Object::Object { } -Next token is token 'a' (0x55fe244a3b40 'a') -Shifting token 'a' (0x55fe244a3b40 'a') -Entering state 2 -Stack now 0 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fe244a3b40 'a') --> $$ = nterm item (0x55fe244a3b40 'a') -Entering state 11 -Stack now 0 11 -Reading a token -0x55fe244a3b90->Object::Object { 0x55fe244a3b40 } -Next token is token 'a' (0x55fe244a3b90 'a') -Shifting token 'a' (0x55fe244a3b90 'a') -Entering state 2 -Stack now 0 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fe244a3b90 'a') --> $$ = nterm item (0x55fe244a3b90 'a') -Entering state 11 -Stack now 0 11 11 -Reading a token -0x55fe244a3be0->Object::Object { 0x55fe244a3b40, 0x55fe244a3b90 } -Next token is token 'a' (0x55fe244a3be0 'a') -Shifting token 'a' (0x55fe244a3be0 'a') -Entering state 2 -Stack now 0 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fe244a3be0 'a') --> $$ = nterm item (0x55fe244a3be0 'a') -Entering state 11 -Stack now 0 11 11 11 -Reading a token -0x55fe244a3c30->Object::Object { 0x55fe244a3b40, 0x55fe244a3b90, 0x55fe244a3be0 } -Next token is token 'a' (0x55fe244a3c30 'a') -Shifting token 'a' (0x55fe244a3c30 'a') -Entering state 2 -Stack now 0 11 11 11 2 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x55fe244a3c30 'a') --> $$ = nterm item (0x55fe244a3c30 'a') -Entering state 11 -Stack now 0 11 11 11 11 -Reading a token -0x55fe244a3c80->Object::Object { 0x55fe244a3b40, 0x55fe244a3b90, 0x55fe244a3be0, 0x55fe244a3c30 } -Next token is token 'p' (0x55fe244a3c80 'p'Exception caught: cleaning lookahead and stack -0x55fe244a3c80->Object::~Object { 0x55fe244a3b40, 0x55fe244a3b90, 0x55fe244a3be0, 0x55fe244a3c30, 0x55fe244a3c80 } -0x55fe244a3c30->Object::~Object { 0x55fe244a3b40, 0x55fe244a3b90, 0x55fe244a3be0, 0x55fe244a3c30 } -0x55fe244a3be0->Object::~Object { 0x55fe244a3b40, 0x55fe244a3b90, 0x55fe244a3be0 } -0x55fe244a3b90->Object::~Object { 0x55fe244a3b40, 0x55fe244a3b90 } -0x55fe244a3b40->Object::~Object { 0x55fe244a3b40 } -exception caught: printer -end { } -./c++.at:1360: grep '^exception caught: printer$' stderr -stdout: -exception caught: printer -./c++.at:1360: $PREPARSER ./input aaaae -stderr: -exception caught: syntax error -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaE -stderr: -exception caught: syntax error, unexpected end of file, expecting 'a' -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaT -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1360: $PREPARSER ./input aaaaR -stderr: -./c++.at:1360: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -685. 
c++.at:1360: ok -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS -stderr: -stdout: -./c++.at:1361: $PREPARSER ./input aaaas -stderr: -stdout: -stderr: -exception caught: reduction -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: $PREPARSER ./input Nwin -stderr: -./c++.at:1361: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input i -stderr: -exception caught: initial-action -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: $PREPARSER ./input Owin -stderr: -syntax error, unexpected 'n', expecting 'o' -./c++.at:1361: $PREPARSER ./input aaaap -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: $PREPARSER ./input Owio -stderr: -./c++.at:1361: $PREPARSER ./input --debug aaaap +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1964: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaap stderr: -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2150: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +765. glr-regression.at:1964: ok +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input --debug aaaap +./glr-regression.at:2151: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +774. glr-regression.at:2229: testing Predicates: glr.c ... 
stderr: +./glr-regression.at:2229: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.c input.y Starting parse Entering state 0 Stack now 0 Reading a token -0x555c5d0f8b40->Object::Object { } -Next token is token 'a' (0x555c5d0f8b40 'a') -Shifting token 'a' (0x555c5d0f8b40 'a') +0x7fffb149a41f->Object::Object { } +0x7fffb149a4f0->Object::Object { 0x7fffb149a41f } +0x7fffb149a41f->Object::~Object { 0x7fffb149a41f, 0x7fffb149a4f0 } +Next token is token 'a' (0x7fffb149a4f0 'a') +0x7fffb149a440->Object::Object { 0x7fffb149a4f0 } +0x7fffb149a4f0->Object::~Object { 0x7fffb149a440, 0x7fffb149a4f0 } +Shifting token 'a' (0x7fffb149a440 'a') +0x55b9344e5ee0->Object::Object { 0x7fffb149a440 } +0x7fffb149a440->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a440 } Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555c5d0f8b40 'a') --> $$ = nterm item (0x555c5d0f8b40 'a') +0x7fffb149a510->Object::Object { 0x55b9344e5ee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b9344e5ee0 'a') +-> $$ = nterm item (0x7fffb149a510 'a') +0x55b9344e5ee0->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a510 } +0x55b9344e5ee0->Object::Object { 0x7fffb149a510 } +0x7fffb149a510->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a510 } Entering state 10 Stack now 0 10 Reading a token -0x555c5d0f8b90->Object::Object { 0x555c5d0f8b40 } -Next token is token 'a' (0x555c5d0f8b90 'a') -Shifting token 'a' (0x555c5d0f8b90 'a') +0x7fffb149a41f->Object::Object { 0x55b9344e5ee0 } +0x7fffb149a4f0->Object::Object { 0x55b9344e5ee0, 0x7fffb149a41f } +0x7fffb149a41f->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a41f, 0x7fffb149a4f0 } +Next token is token 'a' (0x7fffb149a4f0 'a') +0x7fffb149a440->Object::Object { 0x55b9344e5ee0, 0x7fffb149a4f0 } +0x7fffb149a4f0->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a440, 0x7fffb149a4f0 } +Shifting token 'a' (0x7fffb149a440 'a') +0x55b9344e5f00->Object::Object { 0x55b9344e5ee0, 0x7fffb149a440 } +0x7fffb149a440->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a440 } Entering state 1 Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555c5d0f8b90 'a') --> $$ = nterm item (0x555c5d0f8b90 'a') +0x7fffb149a510->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b9344e5f00 'a') +-> $$ = nterm item (0x7fffb149a510 'a') +0x55b9344e5f00->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a510 } +0x55b9344e5f00->Object::Object { 0x55b9344e5ee0, 0x7fffb149a510 } +0x7fffb149a510->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a510 } Entering state 10 Stack now 0 10 10 Reading a token -0x555c5d0f8be0->Object::Object { 0x555c5d0f8b40, 0x555c5d0f8b90 } -Next token is token 'a' (0x555c5d0f8be0 'a') -Shifting token 'a' (0x555c5d0f8be0 'a') +0x7fffb149a41f->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00 } +0x7fffb149a4f0->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a41f } +0x7fffb149a41f->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a41f, 0x7fffb149a4f0 } +Next token is token 'a' (0x7fffb149a4f0 'a') +0x7fffb149a440->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a4f0 } +0x7fffb149a4f0->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a440, 0x7fffb149a4f0 } +Shifting token 'a' (0x7fffb149a440 'a') +0x55b9344e5f20->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a440 } +0x7fffb149a440->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 
0x55b9344e5f20, 0x7fffb149a440 } Entering state 1 Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555c5d0f8be0 'a') --> $$ = nterm item (0x555c5d0f8be0 'a') +0x7fffb149a510->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b9344e5f20 'a') +-> $$ = nterm item (0x7fffb149a510 'a') +0x55b9344e5f20->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a510 } +0x55b9344e5f20->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a510 } +0x7fffb149a510->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a510 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x555c5d0f8c30->Object::Object { 0x555c5d0f8b40, 0x555c5d0f8b90, 0x555c5d0f8be0 } -Next token is token 'a' (0x555c5d0f8c30 'a') -Shifting token 'a' (0x555c5d0f8c30 'a') +0x7fffb149a41f->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20 } +0x7fffb149a4f0->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a41f } +0x7fffb149a41f->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a41f, 0x7fffb149a4f0 } +Next token is token 'a' (0x7fffb149a4f0 'a') +0x7fffb149a440->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a4f0 } +0x7fffb149a4f0->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a440, 0x7fffb149a4f0 } +Shifting token 'a' (0x7fffb149a440 'a') +0x55b9344e5f40->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a440 } +0x7fffb149a440->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a440 } Entering state 1 Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555c5d0f8c30 'a') --> $$ = nterm item (0x555c5d0f8c30 'a') +0x7fffb149a510->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b9344e5f40 'a') +-> $$ = nterm item (0x7fffb149a510 'a') +0x55b9344e5f40->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a510 } +0x55b9344e5f40->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a510 } +0x7fffb149a510->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a510 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x555c5d0f8c80->Object::Object { 0x555c5d0f8b40, 0x555c5d0f8b90, 0x555c5d0f8be0, 0x555c5d0f8c30 } -Next token is token 'p' (0x555c5d0f8c80 'p'Exception caught: cleaning lookahead and stack -0x555c5d0f8c80->Object::~Object { 0x555c5d0f8b40, 0x555c5d0f8b90, 0x555c5d0f8be0, 0x555c5d0f8c30, 0x555c5d0f8c80 } -0x555c5d0f8c30->Object::~Object { 0x555c5d0f8b40, 0x555c5d0f8b90, 0x555c5d0f8be0, 0x555c5d0f8c30 } -0x555c5d0f8be0->Object::~Object { 0x555c5d0f8b40, 0x555c5d0f8b90, 0x555c5d0f8be0 } -0x555c5d0f8b90->Object::~Object { 0x555c5d0f8b40, 0x555c5d0f8b90 } -0x555c5d0f8b40->Object::~Object { 0x555c5d0f8b40 } +0x7fffb149a41f->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40 } +0x7fffb149a4f0->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a41f } +0x7fffb149a41f->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a41f, 0x7fffb149a4f0 } +Next token is token 'p' (0x7fffb149a4f0 'p'Exception caught: cleaning lookahead and stack +0x55b9344e5f40->Object::~Object { 
0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a4f0 } +0x55b9344e5f20->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a4f0 } +0x55b9344e5f00->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a4f0 } +0x55b9344e5ee0->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a4f0 } +0x7fffb149a4f0->Object::~Object { 0x7fffb149a4f0 } exception caught: printer end { } -stdout: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -./glr-regression.at:2150: $PREPARSER ./input --debug Starting parse Entering state 0 Stack now 0 Reading a token -0x555c5d0f8b40->Object::Object { } -Next token is token 'a' (0x555c5d0f8b40 'a') -Shifting token 'a' (0x555c5d0f8b40 'a') +0x7fffb149a41f->Object::Object { } +0x7fffb149a4f0->Object::Object { 0x7fffb149a41f } +0x7fffb149a41f->Object::~Object { 0x7fffb149a41f, 0x7fffb149a4f0 } +Next token is token 'a' (0x7fffb149a4f0 'a') +0x7fffb149a440->Object::Object { 0x7fffb149a4f0 } +0x7fffb149a4f0->Object::~Object { 0x7fffb149a440, 0x7fffb149a4f0 } +Shifting token 'a' (0x7fffb149a440 'a') +0x55b9344e5ee0->Object::Object { 0x7fffb149a440 } +0x7fffb149a440->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a440 } Entering state 1 Stack now 0 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555c5d0f8b40 'a') --> $$ = nterm item (0x555c5d0f8b40 'a') +0x7fffb149a510->Object::Object { 0x55b9344e5ee0 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b9344e5ee0 'a') +-> $$ = nterm item (0x7fffb149a510 'a') +0x55b9344e5ee0->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a510 } +0x55b9344e5ee0->Object::Object { 0x7fffb149a510 } +0x7fffb149a510->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a510 } Entering state 10 Stack now 0 10 Reading a token -0x555c5d0f8b90->Object::Object { 0x555c5d0f8b40 } -Next token is token 'a' (0x555c5d0f8b90 'a') -Shifting token 'a' (0x555c5d0f8b90 'a') +0x7fffb149a41f->Object::Object { 0x55b9344e5ee0 } +0x7fffb149a4f0->Object::Object { 0x55b9344e5ee0, 0x7fffb149a41f } +0x7fffb149a41f->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a41f, 0x7fffb149a4f0 } +Next token is token 'a' (0x7fffb149a4f0 'a') +0x7fffb149a440->Object::Object { 0x55b9344e5ee0, 0x7fffb149a4f0 } +0x7fffb149a4f0->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a440, 0x7fffb149a4f0 } +Shifting token 'a' (0x7fffb149a440 'a') +0x55b9344e5f00->Object::Object { 0x55b9344e5ee0, 0x7fffb149a440 } +0x7fffb149a440->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a440 } Entering state 1 Stack now 0 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555c5d0f8b90 'a') --> $$ = nterm item (0x555c5d0f8b90 'a') +0x7fffb149a510->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b9344e5f00 'a') +-> $$ = nterm item (0x7fffb149a510 'a') +0x55b9344e5f00->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a510 } +0x55b9344e5f00->Object::Object { 0x55b9344e5ee0, 0x7fffb149a510 } +0x7fffb149a510->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a510 } Entering state 10 Stack now 0 10 10 Reading a token -0x555c5d0f8be0->Object::Object { 0x555c5d0f8b40, 0x555c5d0f8b90 } -Next token is token 'a' (0x555c5d0f8be0 'a') -Shifting token 'a' (0x555c5d0f8be0 'a') +0x7fffb149a41f->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00 } +0x7fffb149a4f0->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a41f } 
+0x7fffb149a41f->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a41f, 0x7fffb149a4f0 } +Next token is token 'a' (0x7fffb149a4f0 'a') +0x7fffb149a440->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a4f0 } +0x7fffb149a4f0->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a440, 0x7fffb149a4f0 } +Shifting token 'a' (0x7fffb149a440 'a') +0x55b9344e5f20->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a440 } +0x7fffb149a440->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a440 } Entering state 1 Stack now 0 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555c5d0f8be0 'a') --> $$ = nterm item (0x555c5d0f8be0 'a') +0x7fffb149a510->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b9344e5f20 'a') +-> $$ = nterm item (0x7fffb149a510 'a') +0x55b9344e5f20->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a510 } +0x55b9344e5f20->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a510 } +0x7fffb149a510->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a510 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x555c5d0f8c30->Object::Object { 0x555c5d0f8b40, 0x555c5d0f8b90, 0x555c5d0f8be0 } -Next token is token 'a' (0x555c5d0f8c30 'a') -Shifting token 'a' (0x555c5d0f8c30 'a') +0x7fffb149a41f->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20 } +0x7fffb149a4f0->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a41f } +0x7fffb149a41f->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a41f, 0x7fffb149a4f0 } +Next token is token 'a' (0x7fffb149a4f0 'a') +0x7fffb149a440->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a4f0 } +0x7fffb149a4f0->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a440, 0x7fffb149a4f0 } +Shifting token 'a' (0x7fffb149a440 'a') +0x55b9344e5f40->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a440 } +0x7fffb149a440->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a440 } Entering state 1 Stack now 0 10 10 10 1 -Reducing stack by rule 4 (line 147): - $1 = token 'a' (0x555c5d0f8c30 'a') --> $$ = nterm item (0x555c5d0f8c30 'a') +0x7fffb149a510->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40 } +Reducing stack by rule 4 (line 142): + $1 = token 'a' (0x55b9344e5f40 'a') +-> $$ = nterm item (0x7fffb149a510 'a') +0x55b9344e5f40->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a510 } +0x55b9344e5f40->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a510 } +0x7fffb149a510->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a510 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x555c5d0f8c80->Object::Object { 0x555c5d0f8b40, 0x555c5d0f8b90, 0x555c5d0f8be0, 0x555c5d0f8c30 } -Next token is token 'p' (0x555c5d0f8c80 'p'Exception caught: cleaning lookahead and stack -0x555c5d0f8c80->Object::~Object { 0x555c5d0f8b40, 0x555c5d0f8b90, 0x555c5d0f8be0, 0x555c5d0f8c30, 0x555c5d0f8c80 } -0x555c5d0f8c30->Object::~Object { 0x555c5d0f8b40, 0x555c5d0f8b90, 0x555c5d0f8be0, 0x555c5d0f8c30 } -0x555c5d0f8be0->Object::~Object { 0x555c5d0f8b40, 0x555c5d0f8b90, 0x555c5d0f8be0 } -0x555c5d0f8b90->Object::~Object { 0x555c5d0f8b40, 0x555c5d0f8b90 } 
-0x555c5d0f8b40->Object::~Object { 0x555c5d0f8b40 } +0x7fffb149a41f->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40 } +0x7fffb149a4f0->Object::Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a41f } +0x7fffb149a41f->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a41f, 0x7fffb149a4f0 } +Next token is token 'p' (0x7fffb149a4f0 'p'Exception caught: cleaning lookahead and stack +0x55b9344e5f40->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x55b9344e5f40, 0x7fffb149a4f0 } +0x55b9344e5f20->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x55b9344e5f20, 0x7fffb149a4f0 } +0x55b9344e5f00->Object::~Object { 0x55b9344e5ee0, 0x55b9344e5f00, 0x7fffb149a4f0 } +0x55b9344e5ee0->Object::~Object { 0x55b9344e5ee0, 0x7fffb149a4f0 } +0x7fffb149a4f0->Object::~Object { 0x7fffb149a4f0 } exception caught: printer end { } -./c++.at:1361: grep '^exception caught: printer$' stderr -./glr-regression.at:2229: $PREPARSER ./input Nwio +./c++.at:1363: grep '^exception caught: printer$' stderr + stdout: -stderr: exception caught: printer -Starting parse -Entering state 0 -Reading a token -Next token is token 'a' () -Shifting token 'a' () -Entering state 1 -Reading a token -Next token is token 'b' () -Shifting token 'b' () -Entering state 3 -Reducing stack 0 by rule 3 (line 30): - $1 = token 'b' () --> $$ = nterm b () -Entering state 4 -Reading a token -Next token is token 'c' () -Shifting token 'c' () -Entering state 6 -Reducing stack 0 by rule 4 (line 31): --> $$ = nterm d () -Entering state 7 -Reading a token -Now at end of input. -Stack 0 Entering state 7 -Now at end of input. -Splitting off stack 1 from 0. -Reduced stack 1 by rule 2 (line 28); action deferred. Now in state 2. -Stack 1 Entering state 2 -Now at end of input. -Reduced stack 0 by rule 1 (line 27); action deferred. Now in state 2. -Merging stack 0 into stack 1. -Stack 1 Entering state 2 -Now at end of input. -Removing dead stacks. -Rename stack 1 -> 0. -On stack 0, shifting token "end of file" () -Stack 0 now in state 5 -Ambiguity detected. -Option 1, - start -> - 'a' - b - 'c' - d - -Option 2, - start -> - 'a' - b - 'c' - d - -syntax is ambiguous -Cleanup: popping token "end of file" () -Cleanup: popping unresolved nterm start () -Cleanup: popping nterm d () -Cleanup: popping token 'c' () -Cleanup: popping nterm b () -Cleanup: popping token 'a' () -stderr: -./c++.at:1361: $PREPARSER ./input aaaae -syntax error, unexpected 'o', expecting 'n' -./glr-regression.at:2150: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaae stderr: exception caught: syntax error -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -772. glr-regression.at:2150: ok -./c++.at:1361: $PREPARSER ./input aaaaE -stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaE stderr: exception caught: syntax error, unexpected end of file, expecting 'a' -774. 
glr-regression.at:2229: ok -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $CC $CFLAGS $CPPFLAGS $LDFLAGS -o input input.c $LIBS +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaaT +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +775. glr-regression.at:2230: testing Predicates: glr.cc ... +./glr-regression.at:2230: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y +./c++.at:1363: $PREPARSER ./input aaaaR +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1363: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./glr-regression.at:2230: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: stdout: ./glr-regression.at:1787: $PREPARSER ./glr-regr15 stderr: -./c++.at:1361: $PREPARSER ./input aaaaT Ambiguity detected. Option 1, ambiguity -> @@ -270843,35 +270664,16 @@ syntax is ambiguous ./glr-regression.at:1787: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -stderr: -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1361: $PREPARSER ./input aaaaR -761. glr-regression.at:1787: stderr: - ok -./c++.at:1361: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -686. c++.at:1361: ok -stderr: -stdout: -./glr-regression.at:2230: $PREPARSER ./input Nwin -stderr: -./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2230: $PREPARSER ./input Owin -stderr: -syntax error, unexpected 'n', expecting 'o' -./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2230: $PREPARSER ./input Owio -stderr: -./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:2230: $PREPARSER ./input Nwio -stderr: -syntax error, unexpected 'o', expecting 'n' -./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -775. glr-regression.at:2230: ok +761. glr-regression.at:1787: ok + +776. glr-regression.at:2231: testing Predicates: glr2.cc ... 
+./glr-regression.at:2231: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.y stderr: stdout: ./c++.at:1362: $PREPARSER ./input aaaas stderr: exception caught: reduction +./glr-regression.at:2231: $CXX $CPPFLAGS $CXX11_CXXFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaal stderr: @@ -270881,220 +270683,220 @@ stderr: exception caught: initial-action ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./c++.at:858: $PREPARSER ./input ./c++.at:1362: $PREPARSER ./input aaaap stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: -stdout: -./glr-regression.at:1862: $PREPARSER ./glr-regr16 +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS ./c++.at:1362: $PREPARSER ./input --debug aaaap stderr: -stderr: -syntax error Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffd448b1337->Object::Object { } -0x7ffd448b13c0->Object::Object { 0x7ffd448b1337 } -0x7ffd448b1337->Object::~Object { 0x7ffd448b1337, 0x7ffd448b13c0 } -Next token is token 'a' (0x7ffd448b13c0 'a') -0x7ffd448b1310->Object::Object { 0x7ffd448b13c0 } -0x7ffd448b13c0->Object::~Object { 0x7ffd448b1310, 0x7ffd448b13c0 } -Shifting token 'a' (0x7ffd448b1310 'a') -0x55d3554f1ee0->Object::Object { 0x7ffd448b1310 } -0x7ffd448b1310->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b1310 } +0x7ffecb0a6497->Object::Object { } +0x7ffecb0a6520->Object::Object { 0x7ffecb0a6497 } +0x7ffecb0a6497->Object::~Object { 0x7ffecb0a6497, 0x7ffecb0a6520 } +Next token is token 'a' (0x7ffecb0a6520 'a') +0x7ffecb0a6470->Object::Object { 0x7ffecb0a6520 } +0x7ffecb0a6520->Object::~Object { 0x7ffecb0a6470, 0x7ffecb0a6520 } +Shifting token 'a' (0x7ffecb0a6470 'a') +0x55c1bfcb6ee0->Object::Object { 0x7ffecb0a6470 } +0x7ffecb0a6470->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6470 } Entering state 2 Stack now 0 2 -0x7ffd448b13e0->Object::Object { 0x55d3554f1ee0 } +0x7ffecb0a6540->Object::Object { 0x55c1bfcb6ee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d3554f1ee0 'a') --> $$ = nterm item (0x7ffd448b13e0 'a') -0x55d3554f1ee0->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b13e0 } -0x55d3554f1ee0->Object::Object { 0x7ffd448b13e0 } -0x7ffd448b13e0->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b13e0 } + $1 = token 'a' (0x55c1bfcb6ee0 'a') +-> $$ = nterm item (0x7ffecb0a6540 'a') +0x55c1bfcb6ee0->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6540 } +0x55c1bfcb6ee0->Object::Object { 0x7ffecb0a6540 } +0x7ffecb0a6540->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6540 } Entering state 11 Stack now 0 11 Reading a token -0x7ffd448b1337->Object::Object { 0x55d3554f1ee0 } -0x7ffd448b13c0->Object::Object { 0x55d3554f1ee0, 0x7ffd448b1337 } -0x7ffd448b1337->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b1337, 0x7ffd448b13c0 } -Next token is token 'a' (0x7ffd448b13c0 'a') -0x7ffd448b1310->Object::Object { 0x55d3554f1ee0, 0x7ffd448b13c0 } -0x7ffd448b13c0->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b1310, 0x7ffd448b13c0 } -Shifting token 'a' (0x7ffd448b1310 'a') -0x55d3554f1f00->Object::Object { 0x55d3554f1ee0, 0x7ffd448b1310 } -0x7ffd448b1310->Object::~Object { 
0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b1310 } +0x7ffecb0a6497->Object::Object { 0x55c1bfcb6ee0 } +0x7ffecb0a6520->Object::Object { 0x55c1bfcb6ee0, 0x7ffecb0a6497 } +0x7ffecb0a6497->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6497, 0x7ffecb0a6520 } +Next token is token 'a' (0x7ffecb0a6520 'a') +0x7ffecb0a6470->Object::Object { 0x55c1bfcb6ee0, 0x7ffecb0a6520 } +0x7ffecb0a6520->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6470, 0x7ffecb0a6520 } +Shifting token 'a' (0x7ffecb0a6470 'a') +0x55c1bfcb6f00->Object::Object { 0x55c1bfcb6ee0, 0x7ffecb0a6470 } +0x7ffecb0a6470->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6470 } Entering state 2 Stack now 0 11 2 -0x7ffd448b13e0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00 } +0x7ffecb0a6540->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d3554f1f00 'a') --> $$ = nterm item (0x7ffd448b13e0 'a') -0x55d3554f1f00->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b13e0 } -0x55d3554f1f00->Object::Object { 0x55d3554f1ee0, 0x7ffd448b13e0 } -0x7ffd448b13e0->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b13e0 } + $1 = token 'a' (0x55c1bfcb6f00 'a') +-> $$ = nterm item (0x7ffecb0a6540 'a') +0x55c1bfcb6f00->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6540 } +0x55c1bfcb6f00->Object::Object { 0x55c1bfcb6ee0, 0x7ffecb0a6540 } +0x7ffecb0a6540->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6540 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffd448b1337->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00 } -0x7ffd448b13c0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b1337 } -0x7ffd448b1337->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b1337, 0x7ffd448b13c0 } -Next token is token 'a' (0x7ffd448b13c0 'a') -0x7ffd448b1310->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b13c0 } -0x7ffd448b13c0->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b1310, 0x7ffd448b13c0 } -Shifting token 'a' (0x7ffd448b1310 'a') -0x55d3554f1f20->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b1310 } -0x7ffd448b1310->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b1310 } +0x7ffecb0a6497->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00 } +0x7ffecb0a6520->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6497 } +0x7ffecb0a6497->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6497, 0x7ffecb0a6520 } +Next token is token 'a' (0x7ffecb0a6520 'a') +0x7ffecb0a6470->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6520 } +0x7ffecb0a6520->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6470, 0x7ffecb0a6520 } +Shifting token 'a' (0x7ffecb0a6470 'a') +0x55c1bfcb6f20->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6470 } +0x7ffecb0a6470->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6470 } Entering state 2 Stack now 0 11 11 2 -0x7ffd448b13e0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20 } +0x7ffecb0a6540->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d3554f1f20 'a') --> $$ = nterm item (0x7ffd448b13e0 'a') -0x55d3554f1f20->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b13e0 } -0x55d3554f1f20->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b13e0 } -0x7ffd448b13e0->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b13e0 } + $1 = token 'a' 
(0x55c1bfcb6f20 'a') +-> $$ = nterm item (0x7ffecb0a6540 'a') +0x55c1bfcb6f20->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6540 } +0x55c1bfcb6f20->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6540 } +0x7ffecb0a6540->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6540 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffd448b1337->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20 } -0x7ffd448b13c0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b1337 } -0x7ffd448b1337->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b1337, 0x7ffd448b13c0 } -Next token is token 'a' (0x7ffd448b13c0 'a') -0x7ffd448b1310->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b13c0 } -0x7ffd448b13c0->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b1310, 0x7ffd448b13c0 } -Shifting token 'a' (0x7ffd448b1310 'a') -0x55d3554f1f40->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b1310 } -0x7ffd448b1310->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b1310 } +0x7ffecb0a6497->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20 } +0x7ffecb0a6520->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6497 } +0x7ffecb0a6497->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6497, 0x7ffecb0a6520 } +Next token is token 'a' (0x7ffecb0a6520 'a') +0x7ffecb0a6470->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6520 } +0x7ffecb0a6520->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6470, 0x7ffecb0a6520 } +Shifting token 'a' (0x7ffecb0a6470 'a') +0x55c1bfcb6f40->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6470 } +0x7ffecb0a6470->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6470 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffd448b13e0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40 } +0x7ffecb0a6540->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d3554f1f40 'a') --> $$ = nterm item (0x7ffd448b13e0 'a') -0x55d3554f1f40->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b13e0 } -0x55d3554f1f40->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b13e0 } -0x7ffd448b13e0->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b13e0 } + $1 = token 'a' (0x55c1bfcb6f40 'a') +-> $$ = nterm item (0x7ffecb0a6540 'a') +0x55c1bfcb6f40->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6540 } +0x55c1bfcb6f40->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6540 } +0x7ffecb0a6540->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6540 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffd448b1337->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40 } -0x7ffd448b13c0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b1337 } -0x7ffd448b1337->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b1337, 0x7ffd448b13c0 } -Next token is token 'p' 
(0x7ffd448b13c0 'p'Exception caught: cleaning lookahead and stack -0x55d3554f1f40->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b13c0 } -0x55d3554f1f20->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b13c0 } -0x55d3554f1f00->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b13c0 } -0x55d3554f1ee0->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b13c0 } -0x7ffd448b13c0->Object::~Object { 0x7ffd448b13c0 } +0x7ffecb0a6497->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40 } +0x7ffecb0a6520->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6497 } +0x7ffecb0a6497->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6497, 0x7ffecb0a6520 } +Next token is token 'p' (0x7ffecb0a6520 'p'Exception caught: cleaning lookahead and stack +0x55c1bfcb6f40->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6520 } +0x55c1bfcb6f20->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6520 } +0x55c1bfcb6f00->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6520 } +0x55c1bfcb6ee0->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6520 } +0x7ffecb0a6520->Object::~Object { 0x7ffecb0a6520 } exception caught: printer end { } -./glr-regression.at:1862: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffd448b1337->Object::Object { } -0x7ffd448b13c0->Object::Object { 0x7ffd448b1337 } -0x7ffd448b1337->Object::~Object { 0x7ffd448b1337, 0x7ffd448b13c0 } -Next token is token 'a' (0x7ffd448b13c0 'a') -0x7ffd448b1310->Object::Object { 0x7ffd448b13c0 } -0x7ffd448b13c0->Object::~Object { 0x7ffd448b1310, 0x7ffd448b13c0 } -Shifting token 'a' (0x7ffd448b1310 'a') -0x55d3554f1ee0->Object::Object { 0x7ffd448b1310 } -0x7ffd448b1310->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b1310 } +0x7ffecb0a6497->Object::Object { } +0x7ffecb0a6520->Object::Object { 0x7ffecb0a6497 } +0x7ffecb0a6497->Object::~Object { 0x7ffecb0a6497, 0x7ffecb0a6520 } +Next token is token 'a' (0x7ffecb0a6520 'a') +0x7ffecb0a6470->Object::Object { 0x7ffecb0a6520 } +0x7ffecb0a6520->Object::~Object { 0x7ffecb0a6470, 0x7ffecb0a6520 } +Shifting token 'a' (0x7ffecb0a6470 'a') +0x55c1bfcb6ee0->Object::Object { 0x7ffecb0a6470 } +0x7ffecb0a6470->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6470 } Entering state 2 Stack now 0 2 -0x7ffd448b13e0->Object::Object { 0x55d3554f1ee0 } +0x7ffecb0a6540->Object::Object { 0x55c1bfcb6ee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d3554f1ee0 'a') --> $$ = nterm item (0x7ffd448b13e0 'a') -0x55d3554f1ee0->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b13e0 } -0x55d3554f1ee0->Object::Object { 0x7ffd448b13e0 } -0x7ffd448b13e0->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b13e0 } + $1 = token 'a' (0x55c1bfcb6ee0 'a') +-> $$ = nterm item (0x7ffecb0a6540 'a') +0x55c1bfcb6ee0->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6540 } +0x55c1bfcb6ee0->Object::Object { 0x7ffecb0a6540 } +0x7ffecb0a6540->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6540 } Entering state 11 Stack now 0 11 Reading a token -0x7ffd448b1337->Object::Object { 0x55d3554f1ee0 } -0x7ffd448b13c0->Object::Object { 0x55d3554f1ee0, 0x7ffd448b1337 } -0x7ffd448b1337->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b1337, 0x7ffd448b13c0 } -Next token is token 
'a' (0x7ffd448b13c0 'a') -0x7ffd448b1310->Object::Object { 0x55d3554f1ee0, 0x7ffd448b13c0 } -0x7ffd448b13c0->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b1310, 0x7ffd448b13c0 } -Shifting token 'a' (0x7ffd448b1310 'a') -0x55d3554f1f00->Object::Object { 0x55d3554f1ee0, 0x7ffd448b1310 } -0x7ffd448b1310->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b1310 } +0x7ffecb0a6497->Object::Object { 0x55c1bfcb6ee0 } +0x7ffecb0a6520->Object::Object { 0x55c1bfcb6ee0, 0x7ffecb0a6497 } +0x7ffecb0a6497->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6497, 0x7ffecb0a6520 } +Next token is token 'a' (0x7ffecb0a6520 'a') +0x7ffecb0a6470->Object::Object { 0x55c1bfcb6ee0, 0x7ffecb0a6520 } +0x7ffecb0a6520->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6470, 0x7ffecb0a6520 } +Shifting token 'a' (0x7ffecb0a6470 'a') +0x55c1bfcb6f00->Object::Object { 0x55c1bfcb6ee0, 0x7ffecb0a6470 } +0x7ffecb0a6470->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6470 } Entering state 2 Stack now 0 11 2 -0x7ffd448b13e0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00 } +0x7ffecb0a6540->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d3554f1f00 'a') --> $$ = nterm item (0x7ffd448b13e0 'a') -0x55d3554f1f00->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b13e0 } -0x55d3554f1f00->Object::Object { 0x55d3554f1ee0, 0x7ffd448b13e0 } -0x7ffd448b13e0->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b13e0 } + $1 = token 'a' (0x55c1bfcb6f00 'a') +-> $$ = nterm item (0x7ffecb0a6540 'a') +0x55c1bfcb6f00->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6540 } +0x55c1bfcb6f00->Object::Object { 0x55c1bfcb6ee0, 0x7ffecb0a6540 } +0x7ffecb0a6540->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6540 } Entering state 11 Stack now 0 11 11 Reading a token -0x7ffd448b1337->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00 } -0x7ffd448b13c0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b1337 } -0x7ffd448b1337->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b1337, 0x7ffd448b13c0 } -Next token is token 'a' (0x7ffd448b13c0 'a') -0x7ffd448b1310->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b13c0 } -0x7ffd448b13c0->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b1310, 0x7ffd448b13c0 } -Shifting token 'a' (0x7ffd448b1310 'a') -0x55d3554f1f20->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b1310 } -0x7ffd448b1310->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b1310 } +0x7ffecb0a6497->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00 } +0x7ffecb0a6520->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6497 } +0x7ffecb0a6497->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6497, 0x7ffecb0a6520 } +Next token is token 'a' (0x7ffecb0a6520 'a') +0x7ffecb0a6470->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6520 } +0x7ffecb0a6520->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6470, 0x7ffecb0a6520 } +Shifting token 'a' (0x7ffecb0a6470 'a') +0x55c1bfcb6f20->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6470 } +0x7ffecb0a6470->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6470 } Entering state 2 Stack now 0 11 11 2 -0x7ffd448b13e0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20 } +0x7ffecb0a6540->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d3554f1f20 'a') --> $$ = nterm item 
(0x7ffd448b13e0 'a') -0x55d3554f1f20->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b13e0 } -0x55d3554f1f20->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b13e0 } -0x7ffd448b13e0->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b13e0 } + $1 = token 'a' (0x55c1bfcb6f20 'a') +-> $$ = nterm item (0x7ffecb0a6540 'a') +0x55c1bfcb6f20->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6540 } +0x55c1bfcb6f20->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6540 } +0x7ffecb0a6540->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6540 } Entering state 11 Stack now 0 11 11 11 Reading a token -0x7ffd448b1337->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20 } -0x7ffd448b13c0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b1337 } -0x7ffd448b1337->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b1337, 0x7ffd448b13c0 } -Next token is token 'a' (0x7ffd448b13c0 'a') -0x7ffd448b1310->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b13c0 } -0x7ffd448b13c0->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b1310, 0x7ffd448b13c0 } -Shifting token 'a' (0x7ffd448b1310 'a') -0x55d3554f1f40->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b1310 } -0x7ffd448b1310->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b1310 } +0x7ffecb0a6497->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20 } +0x7ffecb0a6520->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6497 } +0x7ffecb0a6497->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6497, 0x7ffecb0a6520 } +Next token is token 'a' (0x7ffecb0a6520 'a') +0x7ffecb0a6470->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6520 } +0x7ffecb0a6520->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6470, 0x7ffecb0a6520 } +Shifting token 'a' (0x7ffecb0a6470 'a') +0x55c1bfcb6f40->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6470 } +0x7ffecb0a6470->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6470 } Entering state 2 Stack now 0 11 11 11 2 -0x7ffd448b13e0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40 } +0x7ffecb0a6540->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x55d3554f1f40 'a') --> $$ = nterm item (0x7ffd448b13e0 'a') -0x55d3554f1f40->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b13e0 } -0x55d3554f1f40->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b13e0 } -0x7ffd448b13e0->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b13e0 } + $1 = token 'a' (0x55c1bfcb6f40 'a') +-> $$ = nterm item (0x7ffecb0a6540 'a') +0x55c1bfcb6f40->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6540 } +0x55c1bfcb6f40->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6540 } +0x7ffecb0a6540->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6540 } Entering state 11 Stack now 0 11 11 11 11 Reading a token -0x7ffd448b1337->Object::Object { 0x55d3554f1ee0, 
0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40 } -0x7ffd448b13c0->Object::Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b1337 } -0x7ffd448b1337->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b1337, 0x7ffd448b13c0 } -Next token is token 'p' (0x7ffd448b13c0 'p'Exception caught: cleaning lookahead and stack -0x55d3554f1f40->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x55d3554f1f40, 0x7ffd448b13c0 } -0x55d3554f1f20->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x55d3554f1f20, 0x7ffd448b13c0 } -0x55d3554f1f00->Object::~Object { 0x55d3554f1ee0, 0x55d3554f1f00, 0x7ffd448b13c0 } -0x55d3554f1ee0->Object::~Object { 0x55d3554f1ee0, 0x7ffd448b13c0 } -0x7ffd448b13c0->Object::~Object { 0x7ffd448b13c0 } +0x7ffecb0a6497->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40 } +0x7ffecb0a6520->Object::Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6497 } +0x7ffecb0a6497->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6497, 0x7ffecb0a6520 } +Next token is token 'p' (0x7ffecb0a6520 'p'Exception caught: cleaning lookahead and stack +0x55c1bfcb6f40->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x55c1bfcb6f40, 0x7ffecb0a6520 } +0x55c1bfcb6f20->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x55c1bfcb6f20, 0x7ffecb0a6520 } +0x55c1bfcb6f00->Object::~Object { 0x55c1bfcb6ee0, 0x55c1bfcb6f00, 0x7ffecb0a6520 } +0x55c1bfcb6ee0->Object::~Object { 0x55c1bfcb6ee0, 0x7ffecb0a6520 } +0x7ffecb0a6520->Object::~Object { 0x7ffecb0a6520 } exception caught: printer end { } ./c++.at:1362: grep '^exception caught: printer$' stderr stdout: -764. glr-regression.at:1862: ok exception caught: printer ./c++.at:1362: $PREPARSER ./input aaaae stderr: @@ -271106,22 +270908,10 @@ ./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr ./c++.at:1362: $PREPARSER ./input aaaaT stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1362: $PREPARSER ./input aaaaR -stderr: -./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -687. c++.at:1362: ok -stderr: -stdout: -./c++.at:858: $PREPARSER ./input -stderr: -./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy stderr: stdout: -./glr-regression.at:2151: $PREPARSER ./input --debug -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2149: $PREPARSER ./input --debug stderr: Starting parse Entering state 0 @@ -271182,27 +270972,15 @@ Cleanup: popping token 'c' () Cleanup: popping nterm b () Cleanup: popping token 'a' () -./glr-regression.at:2151: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -773. 
glr-regression.at:2151: ok -stderr: -stdout: -./c++.at:1363: $PREPARSER ./input aaaas -stderr: -exception caught: reduction -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input aaaal -stderr: -exception caught: yylex -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1363: $PREPARSER ./input i +./glr-regression.at:2149: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1362: $PREPARSER ./input aaaaR stderr: -exception caught: initial-action +./c++.at:1362: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +771. glr-regression.at:2149: ok +687. c++.at:1362: ok stderr: -./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: -./glr-regression.at:1966: $PREPARSER ./glr-regr17 -./c++.at:1363: $PREPARSER ./input aaaap -stderr: +./glr-regression.at:1965: $PREPARSER ./glr-regr17 stderr: Ambiguity detected. Option 1, @@ -271224,236 +271002,486 @@ empty2 -> 1.1-2.2: syntax is ambiguous +./glr-regression.at:1965: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +./glr-regression.at:1447: $PREPARSER ./glr-regr13 +stderr: +./glr-regression.at:1447: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +755. glr-regression.at:1447: ok +766. glr-regression.at:1965: ok +stderr: +stdout: +./glr-regression.at:2229: $PREPARSER ./input Nwin +stderr: +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $PREPARSER ./input Owin +stderr: +syntax error, unexpected 'n', expecting 'o' +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $PREPARSER ./input Owio +stderr: +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2229: $PREPARSER ./input Nwio +stderr: +syntax error, unexpected 'o', expecting 'n' +./glr-regression.at:2229: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +774. glr-regression.at:2229: ok +stderr: +stdout: +./glr-regression.at:1862: $PREPARSER ./glr-regr16 +stderr: +syntax error +./glr-regression.at:1862: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +764. glr-regression.at:1862: ok +stderr: +stdout: +./glr-regression.at:2150: $PREPARSER ./input --debug +stderr: +Starting parse +Entering state 0 +Reading a token +Next token is token 'a' () +Shifting token 'a' () +Entering state 1 +Reading a token +Next token is token 'b' () +Shifting token 'b' () +Entering state 3 +Reducing stack 0 by rule 3 (line 30): + $1 = token 'b' () +-> $$ = nterm b () +Entering state 4 +Reading a token +Next token is token 'c' () +Shifting token 'c' () +Entering state 6 +Reducing stack 0 by rule 4 (line 31): +-> $$ = nterm d () +Entering state 7 +Reading a token +Now at end of input. +Stack 0 Entering state 7 +Now at end of input. +Splitting off stack 1 from 0. +Reduced stack 1 by rule 2 (line 28); action deferred. Now in state 2. +Stack 1 Entering state 2 +Now at end of input. +Reduced stack 0 by rule 1 (line 27); action deferred. Now in state 2. +Merging stack 0 into stack 1. +Stack 1 Entering state 2 +Now at end of input. +Removing dead stacks. +Rename stack 1 -> 0. +On stack 0, shifting token "end of file" () +Stack 0 now in state 5 +Ambiguity detected. 
+Option 1, + start -> + 'a' + b + 'c' + d + +Option 2, + start -> + 'a' + b + 'c' + d + +syntax is ambiguous +Cleanup: popping token "end of file" () +Cleanup: popping unresolved nterm start () +Cleanup: popping nterm d () +Cleanup: popping token 'c' () +Cleanup: popping nterm b () +Cleanup: popping token 'a' () +./glr-regression.at:2150: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +772. glr-regression.at:2150: ok +stderr: +stdout: +./glr-regression.at:1680: $PREPARSER ./glr-regr14 +stderr: +./glr-regression.at:1680: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +758. glr-regression.at:1680: ok +stderr: +stdout: +./c++.at:858: $PREPARSER ./input +stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./glr-regression.at:2230: $PREPARSER ./input Nwin +stderr: +./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2230: $PREPARSER ./input Owin +stderr: +syntax error, unexpected 'n', expecting 'o' +./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2230: $PREPARSER ./input Owio +stderr: +./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:2230: $PREPARSER ./input Nwio +stderr: +syntax error, unexpected 'o', expecting 'n' +./glr-regression.at:2230: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +775. glr-regression.at:2230: ok +stderr: +stdout: +./c++.at:1363: $PREPARSER ./input aaaas +stderr: +exception caught: reduction ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./glr-regression.at:1966: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input aaaal +stderr: +exception caught: yylex +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:1363: $PREPARSER ./input i +stderr: +exception caught: initial-action +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +./c++.at:1363: $PREPARSER ./input aaaap +stdout: +./c++.at:1066: $PREPARSER ./input < in +stderr: +./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: ./c++.at:1363: $PREPARSER ./input --debug aaaap +error: invalid expression +caught error +error: invalid character +caught error +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +./c++.at:1066: $PREPARSER ./input < in Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffc169e878f->Object::Object { } -0x7ffc169e8860->Object::Object { 0x7ffc169e878f } -0x7ffc169e878f->Object::~Object { 0x7ffc169e878f, 0x7ffc169e8860 } -Next token is token 'a' (0x7ffc169e8860 'a') -0x7ffc169e87b0->Object::Object { 0x7ffc169e8860 } -0x7ffc169e8860->Object::~Object { 0x7ffc169e87b0, 0x7ffc169e8860 } -Shifting token 'a' (0x7ffc169e87b0 'a') -0x562d2f166ee0->Object::Object { 0x7ffc169e87b0 } -0x7ffc169e87b0->Object::~Object { 0x562d2f166ee0, 0x7ffc169e87b0 } +0x7ffcf8ea111f->Object::Object { } +0x7ffcf8ea11f0->Object::Object { 0x7ffcf8ea111f } +0x7ffcf8ea111f->Object::~Object { 0x7ffcf8ea111f, 0x7ffcf8ea11f0 } +Next token is token 'a' (0x7ffcf8ea11f0 'a') +0x7ffcf8ea1140->Object::Object { 0x7ffcf8ea11f0 } +0x7ffcf8ea11f0->Object::~Object { 0x7ffcf8ea1140, 
0x7ffcf8ea11f0 } +Shifting token 'a' (0x7ffcf8ea1140 'a') +0x55a8820acee0->Object::Object { 0x7ffcf8ea1140 } +0x7ffcf8ea1140->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea1140 } Entering state 1 Stack now 0 1 -0x7ffc169e8880->Object::Object { 0x562d2f166ee0 } +0x7ffcf8ea1210->Object::Object { 0x55a8820acee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x562d2f166ee0 'a') --> $$ = nterm item (0x7ffc169e8880 'a') -0x562d2f166ee0->Object::~Object { 0x562d2f166ee0, 0x7ffc169e8880 } -0x562d2f166ee0->Object::Object { 0x7ffc169e8880 } -0x7ffc169e8880->Object::~Object { 0x562d2f166ee0, 0x7ffc169e8880 } + $1 = token 'a' (0x55a8820acee0 'a') +-> $$ = nterm item (0x7ffcf8ea1210 'a') +0x55a8820acee0->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea1210 } +0x55a8820acee0->Object::Object { 0x7ffcf8ea1210 } +0x7ffcf8ea1210->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea1210 } Entering state 10 Stack now 0 10 Reading a token -0x7ffc169e878f->Object::Object { 0x562d2f166ee0 } -0x7ffc169e8860->Object::Object { 0x562d2f166ee0, 0x7ffc169e878f } -0x7ffc169e878f->Object::~Object { 0x562d2f166ee0, 0x7ffc169e878f, 0x7ffc169e8860 } -Next token is token 'a' (0x7ffc169e8860 'a') -0x7ffc169e87b0->Object::Object { 0x562d2f166ee0, 0x7ffc169e8860 } -0x7ffc169e8860->Object::~Object { 0x562d2f166ee0, 0x7ffc169e87b0, 0x7ffc169e8860 } -Shifting token 'a' (0x7ffc169e87b0 'a') -0x562d2f166f00->Object::Object { 0x562d2f166ee0, 0x7ffc169e87b0 } -0x7ffc169e87b0->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e87b0 } +0x7ffcf8ea111f->Object::Object { 0x55a8820acee0 } +0x7ffcf8ea11f0->Object::Object { 0x55a8820acee0, 0x7ffcf8ea111f } +0x7ffcf8ea111f->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea111f, 0x7ffcf8ea11f0 } +Next token is token 'a' (0x7ffcf8ea11f0 'a') +0x7ffcf8ea1140->Object::Object { 0x55a8820acee0, 0x7ffcf8ea11f0 } +0x7ffcf8ea11f0->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea1140, 0x7ffcf8ea11f0 } +Shifting token 'a' (0x7ffcf8ea1140 'a') +0x55a8820acf00->Object::Object { 0x55a8820acee0, 0x7ffcf8ea1140 } +0x7ffcf8ea1140->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1140 } Entering state 1 Stack now 0 10 1 -0x7ffc169e8880->Object::Object { 0x562d2f166ee0, 0x562d2f166f00 } +0x7ffcf8ea1210->Object::Object { 0x55a8820acee0, 0x55a8820acf00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x562d2f166f00 'a') --> $$ = nterm item (0x7ffc169e8880 'a') -0x562d2f166f00->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e8880 } -0x562d2f166f00->Object::Object { 0x562d2f166ee0, 0x7ffc169e8880 } -0x7ffc169e8880->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e8880 } + $1 = token 'a' (0x55a8820acf00 'a') +-> $$ = nterm item (0x7ffcf8ea1210 'a') +0x55a8820acf00->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1210 } +0x55a8820acf00->Object::Object { 0x55a8820acee0, 0x7ffcf8ea1210 } +0x7ffcf8ea1210->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1210 } Entering state 10 Stack now 0 10 10 Reading a token -0x7ffc169e878f->Object::Object { 0x562d2f166ee0, 0x562d2f166f00 } -0x7ffc169e8860->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e878f } -0x7ffc169e878f->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e878f, 0x7ffc169e8860 } -Next token is token 'a' (0x7ffc169e8860 'a') -0x7ffc169e87b0->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e8860 } -0x7ffc169e8860->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e87b0, 0x7ffc169e8860 } -Shifting token 'a' (0x7ffc169e87b0 'a') -0x562d2f166f20->Object::Object { 
0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e87b0 } -0x7ffc169e87b0->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e87b0 } +0x7ffcf8ea111f->Object::Object { 0x55a8820acee0, 0x55a8820acf00 } +0x7ffcf8ea11f0->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea111f } +0x7ffcf8ea111f->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea111f, 0x7ffcf8ea11f0 } +Next token is token 'a' (0x7ffcf8ea11f0 'a') +0x7ffcf8ea1140->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea11f0 } +0x7ffcf8ea11f0->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1140, 0x7ffcf8ea11f0 } +Shifting token 'a' (0x7ffcf8ea1140 'a') +0x55a8820acf20->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1140 } +0x7ffcf8ea1140->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1140 } Entering state 1 Stack now 0 10 10 1 -0x7ffc169e8880->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20 } +0x7ffcf8ea1210->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x562d2f166f20 'a') --> $$ = nterm item (0x7ffc169e8880 'a') -0x562d2f166f20->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e8880 } -0x562d2f166f20->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e8880 } -0x7ffc169e8880->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e8880 } + $1 = token 'a' (0x55a8820acf20 'a') +-> $$ = nterm item (0x7ffcf8ea1210 'a') +0x55a8820acf20->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1210 } +0x55a8820acf20->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1210 } +0x7ffcf8ea1210->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1210 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffc169e878f->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20 } -0x7ffc169e8860->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e878f } -0x7ffc169e878f->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e878f, 0x7ffc169e8860 } -Next token is token 'a' (0x7ffc169e8860 'a') -0x7ffc169e87b0->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e8860 } -0x7ffc169e8860->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e87b0, 0x7ffc169e8860 } -Shifting token 'a' (0x7ffc169e87b0 'a') -0x562d2f166f40->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e87b0 } -0x7ffc169e87b0->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e87b0 } +0x7ffcf8ea111f->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20 } +0x7ffcf8ea11f0->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea111f } +0x7ffcf8ea111f->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea111f, 0x7ffcf8ea11f0 } +Next token is token 'a' (0x7ffcf8ea11f0 'a') +0x7ffcf8ea1140->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea11f0 } +0x7ffcf8ea11f0->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1140, 0x7ffcf8ea11f0 } +Shifting token 'a' (0x7ffcf8ea1140 'a') +0x55a8820acf40->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1140 } +0x7ffcf8ea1140->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea1140 } Entering state 1 Stack now 0 10 10 10 1 
-0x7ffc169e8880->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40 } +0x7ffcf8ea1210->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x562d2f166f40 'a') --> $$ = nterm item (0x7ffc169e8880 'a') -0x562d2f166f40->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e8880 } -0x562d2f166f40->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e8880 } -0x7ffc169e8880->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e8880 } + $1 = token 'a' (0x55a8820acf40 'a') +-> $$ = nterm item (0x7ffcf8ea1210 'a') +0x55a8820acf40->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea1210 } +0x55a8820acf40->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1210 } +0x7ffcf8ea1210->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea1210 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffc169e878f->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40 } -0x7ffc169e8860->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e878f } -0x7ffc169e878f->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e878f, 0x7ffc169e8860 } -Next token is token 'p' (0x7ffc169e8860 'p'Exception caught: cleaning lookahead and stack -0x562d2f166f40->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e8860 } -0x562d2f166f20->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e8860 } -0x562d2f166f00->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e8860 } -0x562d2f166ee0->Object::~Object { 0x562d2f166ee0, 0x7ffc169e8860 } -0x7ffc169e8860->Object::~Object { 0x7ffc169e8860 } +0x7ffcf8ea111f->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40 } +0x7ffcf8ea11f0->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea111f } +0x7ffcf8ea111f->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea111f, 0x7ffcf8ea11f0 } +Next token is token 'p' (0x7ffcf8ea11f0 'p'Exception caught: cleaning lookahead and stack +0x55a8820acf40->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea11f0 } +0x55a8820acf20->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea11f0 } +0x55a8820acf00->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea11f0 } +0x55a8820acee0->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea11f0 } +0x7ffcf8ea11f0->Object::~Object { 0x7ffcf8ea11f0 } exception caught: printer end { } -767. 
glr-regression.at:1966: ok ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stderr: +stderr: Starting parse Entering state 0 Stack now 0 Reading a token -0x7ffc169e878f->Object::Object { } -0x7ffc169e8860->Object::Object { 0x7ffc169e878f } -0x7ffc169e878f->Object::~Object { 0x7ffc169e878f, 0x7ffc169e8860 } -Next token is token 'a' (0x7ffc169e8860 'a') -0x7ffc169e87b0->Object::Object { 0x7ffc169e8860 } -0x7ffc169e8860->Object::~Object { 0x7ffc169e87b0, 0x7ffc169e8860 } -Shifting token 'a' (0x7ffc169e87b0 'a') -0x562d2f166ee0->Object::Object { 0x7ffc169e87b0 } -0x7ffc169e87b0->Object::~Object { 0x562d2f166ee0, 0x7ffc169e87b0 } +0x7ffcf8ea111f->Object::Object { } +0x7ffcf8ea11f0->Object::Object { 0x7ffcf8ea111f } +0x7ffcf8ea111f->Object::~Object { 0x7ffcf8ea111f, 0x7ffcf8ea11f0 } +Next token is token 'a' (0x7ffcf8ea11f0 'a') +0x7ffcf8ea1140->Object::Object { 0x7ffcf8ea11f0 } +0x7ffcf8ea11f0->Object::~Object { 0x7ffcf8ea1140, 0x7ffcf8ea11f0 } +Shifting token 'a' (0x7ffcf8ea1140 'a') +0x55a8820acee0->Object::Object { 0x7ffcf8ea1140 } +0x7ffcf8ea1140->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea1140 } Entering state 1 Stack now 0 1 -0x7ffc169e8880->Object::Object { 0x562d2f166ee0 } +0x7ffcf8ea1210->Object::Object { 0x55a8820acee0 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x562d2f166ee0 'a') --> $$ = nterm item (0x7ffc169e8880 'a') -0x562d2f166ee0->Object::~Object { 0x562d2f166ee0, 0x7ffc169e8880 } -0x562d2f166ee0->Object::Object { 0x7ffc169e8880 } -0x7ffc169e8880->Object::~Object { 0x562d2f166ee0, 0x7ffc169e8880 } + $1 = token 'a' (0x55a8820acee0 'a') +-> $$ = nterm item (0x7ffcf8ea1210 'a') +0x55a8820acee0->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea1210 } +0x55a8820acee0->Object::Object { 0x7ffcf8ea1210 } +0x7ffcf8ea1210->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea1210 } Entering state 10 Stack now 0 10 Reading a token -0x7ffc169e878f->Object::Object { 0x562d2f166ee0 } -0x7ffc169e8860->Object::Object { 0x562d2f166ee0, 0x7ffc169e878f } -0x7ffc169e878f->Object::~Object { 0x562d2f166ee0, 0x7ffc169e878f, 0x7ffc169e8860 } -Next token is token 'a' (0x7ffc169e8860 'a') -0x7ffc169e87b0->Object::Object { 0x562d2f166ee0, 0x7ffc169e8860 } -0x7ffc169e8860->Object::~Object { 0x562d2f166ee0, 0x7ffc169e87b0, 0x7ffc169e8860 } -Shifting token 'a' (0x7ffc169e87b0 'a') -0x562d2f166f00->Object::Object { 0x562d2f166ee0, 0x7ffc169e87b0 } -0x7ffc169e87b0->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e87b0 } +0x7ffcf8ea111f->Object::Object { 0x55a8820acee0 } +0x7ffcf8ea11f0->Object::Object { 0x55a8820acee0, 0x7ffcf8ea111f } +0x7ffcf8ea111f->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea111f, 0x7ffcf8ea11f0 } +Next token is token 'a' (0x7ffcf8ea11f0 'a') +0x7ffcf8ea1140->Object::Object { 0x55a8820acee0, 0x7ffcf8ea11f0 } +0x7ffcf8ea11f0->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea1140, 0x7ffcf8ea11f0 } +Shifting token 'a' (0x7ffcf8ea1140 'a') +0x55a8820acf00->Object::Object { 0x55a8820acee0, 0x7ffcf8ea1140 } +0x7ffcf8ea1140->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1140 } Entering state 1 Stack now 0 10 1 -0x7ffc169e8880->Object::Object { 0x562d2f166ee0, 0x562d2f166f00 } +0x7ffcf8ea1210->Object::Object { 0x55a8820acee0, 0x55a8820acf00 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x562d2f166f00 'a') --> $$ = nterm item (0x7ffc169e8880 'a') -0x562d2f166f00->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e8880 } -0x562d2f166f00->Object::Object { 0x562d2f166ee0, 0x7ffc169e8880 } -0x7ffc169e8880->Object::~Object { 
0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e8880 } + $1 = token 'a' (0x55a8820acf00 'a') +-> $$ = nterm item (0x7ffcf8ea1210 'a') +0x55a8820acf00->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1210 } +0x55a8820acf00->Object::Object { 0x55a8820acee0, 0x7ffcf8ea1210 } +0x7ffcf8ea1210->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1210 } Entering state 10 Stack now 0 10 10 Reading a token -0x7ffc169e878f->Object::Object { 0x562d2f166ee0, 0x562d2f166f00 } -0x7ffc169e8860->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e878f } -0x7ffc169e878f->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e878f, 0x7ffc169e8860 } -Next token is token 'a' (0x7ffc169e8860 'a') -0x7ffc169e87b0->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e8860 } -0x7ffc169e8860->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e87b0, 0x7ffc169e8860 } -Shifting token 'a' (0x7ffc169e87b0 'a') -0x562d2f166f20->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e87b0 } -0x7ffc169e87b0->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e87b0 } +0x7ffcf8ea111f->Object::Object { 0x55a8820acee0, 0x55a8820acf00 } +0x7ffcf8ea11f0->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea111f } +0x7ffcf8ea111f->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea111f, 0x7ffcf8ea11f0 } +Next token is token 'a' (0x7ffcf8ea11f0 'a') +0x7ffcf8ea1140->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea11f0 } +0x7ffcf8ea11f0->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1140, 0x7ffcf8ea11f0 } +Shifting token 'a' (0x7ffcf8ea1140 'a') +0x55a8820acf20->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1140 } +0x7ffcf8ea1140->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1140 } Entering state 1 Stack now 0 10 10 1 -0x7ffc169e8880->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20 } +0x7ffcf8ea1210->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x562d2f166f20 'a') --> $$ = nterm item (0x7ffc169e8880 'a') -0x562d2f166f20->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e8880 } -0x562d2f166f20->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e8880 } -0x7ffc169e8880->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e8880 } + $1 = token 'a' (0x55a8820acf20 'a') +-> $$ = nterm item (0x7ffcf8ea1210 'a') +0x55a8820acf20->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1210 } +0x55a8820acf20->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea1210 } +0x7ffcf8ea1210->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1210 } Entering state 10 Stack now 0 10 10 10 Reading a token -0x7ffc169e878f->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20 } -0x7ffc169e8860->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e878f } -0x7ffc169e878f->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e878f, 0x7ffc169e8860 } -Next token is token 'a' (0x7ffc169e8860 'a') -0x7ffc169e87b0->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e8860 } -0x7ffc169e8860->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e87b0, 0x7ffc169e8860 } -Shifting token 'a' (0x7ffc169e87b0 'a') -0x562d2f166f40->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e87b0 } 
-0x7ffc169e87b0->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e87b0 } +0x7ffcf8ea111f->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20 } +0x7ffcf8ea11f0->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea111f } +0x7ffcf8ea111f->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea111f, 0x7ffcf8ea11f0 } +Next token is token 'a' (0x7ffcf8ea11f0 'a') +0x7ffcf8ea1140->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea11f0 } +0x7ffcf8ea11f0->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1140, 0x7ffcf8ea11f0 } +Shifting token 'a' (0x7ffcf8ea1140 'a') +0x55a8820acf40->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1140 } +0x7ffcf8ea1140->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea1140 } Entering state 1 Stack now 0 10 10 10 1 -0x7ffc169e8880->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40 } +0x7ffcf8ea1210->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40 } Reducing stack by rule 4 (line 142): - $1 = token 'a' (0x562d2f166f40 'a') --> $$ = nterm item (0x7ffc169e8880 'a') -0x562d2f166f40->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e8880 } -0x562d2f166f40->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e8880 } -0x7ffc169e8880->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e8880 } + $1 = token 'a' (0x55a8820acf40 'a') +-> $$ = nterm item (0x7ffcf8ea1210 'a') +0x55a8820acf40->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea1210 } +0x55a8820acf40->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea1210 } +0x7ffcf8ea1210->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea1210 } Entering state 10 Stack now 0 10 10 10 10 Reading a token -0x7ffc169e878f->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40 } -0x7ffc169e8860->Object::Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e878f } -0x7ffc169e878f->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e878f, 0x7ffc169e8860 } -Next token is token 'p' (0x7ffc169e8860 'p'Exception caught: cleaning lookahead and stack -0x562d2f166f40->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x562d2f166f40, 0x7ffc169e8860 } -0x562d2f166f20->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x562d2f166f20, 0x7ffc169e8860 } -0x562d2f166f00->Object::~Object { 0x562d2f166ee0, 0x562d2f166f00, 0x7ffc169e8860 } -0x562d2f166ee0->Object::~Object { 0x562d2f166ee0, 0x7ffc169e8860 } -0x7ffc169e8860->Object::~Object { 0x7ffc169e8860 } +0x7ffcf8ea111f->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40 } +0x7ffcf8ea11f0->Object::Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea111f } +0x7ffcf8ea111f->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea111f, 0x7ffcf8ea11f0 } +Next token is token 'p' (0x7ffcf8ea11f0 'p'Exception caught: cleaning lookahead and stack +0x55a8820acf40->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x55a8820acf20, 0x55a8820acf40, 0x7ffcf8ea11f0 } +0x55a8820acf20->Object::~Object { 0x55a8820acee0, 
0x55a8820acf00, 0x55a8820acf20, 0x7ffcf8ea11f0 } +0x55a8820acf00->Object::~Object { 0x55a8820acee0, 0x55a8820acf00, 0x7ffcf8ea11f0 } +0x55a8820acee0->Object::~Object { 0x55a8820acee0, 0x7ffcf8ea11f0 } +0x7ffcf8ea11f0->Object::~Object { 0x7ffcf8ea11f0 } exception caught: printer end { } +error: invalid expression ./c++.at:1363: grep '^exception caught: printer$' stderr +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr stdout: exception caught: printer ./c++.at:1363: $PREPARSER ./input aaaae +./c++.at:1066: $PREPARSER ./input < in +stderr: stderr: exception caught: syntax error ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: +error: invalid character +./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./glr-regression.at:1966: $PREPARSER ./glr-regr17 +stderr: ./c++.at:1363: $PREPARSER ./input aaaaE +Ambiguity detected. +Option 1, + start -> + ambig1 -> + sub_ambig2 -> + empty2 -> + 'a' + 'b' + empty1 -> + +Option 2, + start -> + ambig2 -> + sub_ambig2 -> + empty2 -> + 'a' + 'b' + empty2 -> + +1.1-2.2: syntax is ambiguous +./glr-regression.at:1966: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +684. c++.at:1066: ok stderr: exception caught: syntax error, unexpected end of file, expecting 'a' ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +767. glr-regression.at:1966: ok ./c++.at:1363: $PREPARSER ./input aaaaT stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +stderr: +stdout: ./c++.at:1363: $PREPARSER ./input aaaaR +./c++.at:858: $PREPARSER ./input +stderr: stderr: ./c++.at:1363: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -688. c++.at:1363: ok +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +688. c++.at:1363: ======== Testing with C++ standard flags: '' + ok +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: -./c++.at:859: $PREPARSER ./input +./glr-regression.at:2151: $PREPARSER ./input --debug stderr: -./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -======== Testing with C++ standard flags: '' -./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +Starting parse +Entering state 0 +Reading a token +Next token is token 'a' () +Shifting token 'a' () +Entering state 1 +Reading a token +Next token is token 'b' () +Shifting token 'b' () +Entering state 3 +Reducing stack 0 by rule 3 (line 30): + $1 = token 'b' () +-> $$ = nterm b () +Entering state 4 +Reading a token +Next token is token 'c' () +Shifting token 'c' () +Entering state 6 +Reducing stack 0 by rule 4 (line 31): +-> $$ = nterm d () +Entering state 7 +Reading a token +Now at end of input. +Stack 0 Entering state 7 +Now at end of input. +Splitting off stack 1 from 0. +Reduced stack 1 by rule 2 (line 28); action deferred. Now in state 2. +Stack 1 Entering state 2 +Now at end of input. +Reduced stack 0 by rule 1 (line 27); action deferred. Now in state 2. +Merging stack 0 into stack 1. +Stack 1 Entering state 2 +Now at end of input. +Removing dead stacks. +Rename stack 1 -> 0. +On stack 0, shifting token "end of file" () +Stack 0 now in state 5 +Ambiguity detected. 
+Option 1, + start -> + 'a' + b + 'c' + d + +Option 2, + start -> + 'a' + b + 'c' + d + +syntax is ambiguous +Cleanup: popping token "end of file" () +Cleanup: popping unresolved nterm start () +Cleanup: popping nterm d () +Cleanup: popping token 'c' () +Cleanup: popping nterm b () +Cleanup: popping token 'a' () +./glr-regression.at:2151: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +773. glr-regression.at:2151: ok stderr: stdout: ./glr-regression.at:2231: $PREPARSER ./input Nwin @@ -271473,22 +271501,33 @@ 776. glr-regression.at:2231: ok stderr: stdout: -./c++.at:1066: $PREPARSER ./input < in +./c++.at:858: $PREPARSER ./input stderr: -error: invalid expression -caught error -error: invalid character -caught error -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: -error: invalid expression -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -./c++.at:1066: $PREPARSER ./input < in +stdout: +./c++.at:858: $PREPARSER ./input stderr: -error: invalid character -./c++.at:1066: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr -684. c++.at:1066: ok +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:858: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:858: $PREPARSER ./input +stderr: +./c++.at:858: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +./c++.at:859: COLUMNS=1000; export COLUMNS; NO_TERM_HYPERLINKS=1; export NO_TERM_HYPERLINKS; bison --color=no -fno-caret -o input.cc input.yy +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS +stderr: +stdout: +./c++.at:859: $PREPARSER ./input +stderr: +./c++.at:859: sed >&2 -e '/^profiling:.*:Merge mismatch for summaries/d' stderr +======== Testing with C++ standard flags: '' +./c++.at:859: $CXX $CPPFLAGS $CXXFLAGS $LDFLAGS -o input input.cc $LIBS stderr: stdout: ./c++.at:859: $PREPARSER ./input @@ -271611,7 +271650,7 @@ dh_testroot dh_prep dh_auto_install - make -j16 install DESTDIR=/build/bison-3.8.2\+dfsg/debian/tmp AM_UPDATE_INFO_DIR=no + make -j15 install DESTDIR=/build/bison-3.8.2\+dfsg/debian/tmp AM_UPDATE_INFO_DIR=no make[1]: Entering directory '/build/bison-3.8.2+dfsg' make install-recursive make[2]: Entering directory '/build/bison-3.8.2+dfsg' @@ -271648,7 +271687,7 @@ installing zh_CN.gmo as /build/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_CN/LC_MESSAGES/bison.mo installing zh_TW.gmo as /build/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_TW/LC_MESSAGES/bison.mo if test "bison" = "gettext-tools"; then \ - /bin/mkdir -p /build/bison-3.8.2+dfsg/debian/tmp/usr/share/gettext/po; \ + /usr/bin/mkdir -p /build/bison-3.8.2+dfsg/debian/tmp/usr/share/gettext/po; \ for file in Makefile.in.in remove-potcdate.sin quot.sed boldquot.sed en@quot.header en@boldquot.header insert-header.sin Rules-quot Makevars.template; do \ /usr/bin/install -c -m 644 ./$file \ /build/bison-3.8.2+dfsg/debian/tmp/usr/share/gettext/po/$file; \ @@ -271704,7 +271743,7 @@ installing zh_CN.gmo as /build/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_CN/LC_MESSAGES/bison-runtime.mo installing zh_TW.gmo as 
/build/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_TW/LC_MESSAGES/bison-runtime.mo if test "bison" = "gettext-tools"; then \ - /bin/mkdir -p /build/bison-3.8.2+dfsg/debian/tmp/usr/share/gettext/po; \ + /usr/bin/mkdir -p /build/bison-3.8.2+dfsg/debian/tmp/usr/share/gettext/po; \ for file in Makefile.in.in remove-potcdate.sin quot.sed boldquot.sed en@quot.header en@boldquot.header insert-header.sin Rules-quot Makevars.template; do \ /usr/bin/install -c -m 644 ./$file \ /build/bison-3.8.2+dfsg/debian/tmp/usr/share/gettext/po/$file; \ @@ -271757,7 +271796,7 @@ installing zh_CN.gmo as /build/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_CN/LC_MESSAGES/bison-gnulib.mo installing zh_TW.gmo as /build/bison-3.8.2+dfsg/debian/tmp/usr/share/locale/zh_TW/LC_MESSAGES/bison-gnulib.mo if test "bison" = "gettext-tools"; then \ - /bin/mkdir -p /build/bison-3.8.2+dfsg/debian/tmp/usr/share/gettext/po; \ + /usr/bin/mkdir -p /build/bison-3.8.2+dfsg/debian/tmp/usr/share/gettext/po; \ for file in Makefile.in.in remove-potcdate.sin quot.sed boldquot.sed en@quot.header en@boldquot.header insert-header.sin Rules-quot Makevars.template; do \ /usr/bin/install -c -m 644 ./$file \ /build/bison-3.8.2+dfsg/debian/tmp/usr/share/gettext/po/$file; \ @@ -271771,84 +271810,84 @@ make[3]: Leaving directory '/build/bison-3.8.2+dfsg/gnulib-po' Making install in . make[3]: Entering directory '/build/bison-3.8.2+dfsg' -/bin/mkdir -p doc +/usr/bin/mkdir -p doc LC_ALL=C tests/bison --version >doc/bison.help.tmp LC_ALL=C tests/bison --help | \ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ -e '/translation bugs/d' >>doc/bison.help.tmp ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help make[4]: Entering directory '/build/bison-3.8.2+dfsg' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/lib/x86_64-linux-gnu' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal' - /usr/bin/install -c -m 644 lib/liby.a '/build/bison-3.8.2+dfsg/debian/tmp/usr/lib/x86_64-linux-gnu' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/lib/x86_64-linux-gnu' /usr/bin/install -c src/bison '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin' - /usr/bin/install -c -m 644 m4/bison-i18n.m4 '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' + /usr/bin/install -c -m 644 lib/liby.a '/build/bison-3.8.2+dfsg/debian/tmp/usr/lib/x86_64-linux-gnu' /usr/bin/install -c src/yacc '/build/bison-3.8.2+dfsg/debian/tmp/usr/bin' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic' - /usr/bin/install -c -m 644 examples/c/bistromathic/parse.y examples/c/bistromathic/Makefile examples/c/bistromathic/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic' - /usr/bin/install -c -m 644 examples/c++/simple.yy '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c' + /usr/bin/install -c -m 644 m4/bison-i18n.m4 
'/build/bison-3.8.2+dfsg/debian/tmp/usr/share/aclocal' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c' /usr/bin/install -c -m 644 examples/c++/calc++/driver.cc examples/c++/calc++/driver.hh examples/c++/calc++/scanner.ll examples/c++/calc++/calc++.cc examples/c++/calc++/parser.yy '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc' + /usr/bin/install -c -m 644 examples/c++/simple.yy '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' + /usr/bin/install -c -m 644 examples/c/bistromathic/parse.y examples/c/bistromathic/Makefile examples/c/bistromathic/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/bistromathic' ( cd '/build/bison-3.8.2+dfsg/debian/tmp/usr/lib/x86_64-linux-gnu' && ranlib liby.a ) + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc' /usr/bin/install -c -m 644 examples/c/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc' - /usr/bin/install -c -m 644 examples/c/calc/calc.y examples/c/calc/Makefile examples/c/calc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' /usr/bin/install -c -m 644 examples/d/calc/calc.y examples/d/calc/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/calc' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' + /usr/bin/install -c -m 644 examples/c/calc/calc.y examples/c/calc/Makefile examples/c/calc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/calc' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' /usr/bin/install -c -m 644 examples/c++/calc++/README.md examples/c++/calc++/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++/calc++' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d' - /usr/bin/install -c -m 644 examples/c++/README.md examples/c++/Makefile examples/c++/variant.yy examples/c++/variant-11.yy '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d' /usr/bin/install -c -m 644 examples/d/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr' + /usr/bin/install -c -m 644 examples/c++/README.md examples/c++/Makefile examples/c++/variant.yy examples/c++/variant-11.yy '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c++' + /usr/bin/mkdir -p 
'/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java' /usr/bin/install -c -m 644 examples/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java' /usr/bin/install -c -m 644 AUTHORS COPYING NEWS README THANKS TODO '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc' /usr/bin/install -c -m 644 examples/c/glr/c++-types.y examples/c/glr/Makefile examples/c/glr/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/glr' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple' /usr/bin/install -c -m 644 examples/java/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple' /usr/bin/install -c -m 644 examples/java/calc/Calc.y examples/java/calc/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/calc' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc' /usr/bin/install -c -m 644 examples/java/simple/Calc.y examples/java/simple/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/java/simple' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison' - /usr/bin/install -c -m 644 data/m4sugar/foreach.m4 data/m4sugar/m4sugar.m4 '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' /usr/bin/install -c -m 644 examples/c/lexcalc/parse.y examples/c/lexcalc/scan.l examples/c/lexcalc/Makefile examples/c/lexcalc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/lexcalc' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc' - /usr/bin/install -c -m 644 data/README.md data/bison-default.css '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison' /usr/bin/install -c -m 644 examples/c/mfcalc/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' + /usr/bin/install -c -m 644 data/m4sugar/foreach.m4 data/m4sugar/m4sugar.m4 '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/m4sugar' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc' + /usr/bin/install -c -m 644 data/README.md data/bison-default.css '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison' /usr/bin/install -c -m 644 examples/c/pushcalc/calc.y examples/c/pushcalc/Makefile 
examples/c/pushcalc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/pushcalc' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple' - /usr/bin/install -c -m 644 examples/c/reccalc/parse.y examples/c/reccalc/scan.l examples/c/reccalc/Makefile examples/c/reccalc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' /usr/bin/install -c -m 644 examples/c/rpcalc/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple' + /usr/bin/install -c -m 644 examples/c/reccalc/parse.y examples/c/reccalc/scan.l examples/c/reccalc/Makefile examples/c/reccalc/README.md '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/reccalc' /usr/bin/install -c -m 644 examples/d/simple/calc.y examples/d/simple/Makefile '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/d/simple' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons' -/bin/mkdir -p doc - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt' - /usr/bin/install -c -m 644 data/xslt/bison.xsl data/xslt/xml2dot.xsl data/xslt/xml2text.xsl data/xslt/xml2xhtml.xsl '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt' +/usr/bin/mkdir -p doc + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt' /usr/bin/install -c -m 644 data/skeletons/bison.m4 data/skeletons/c++-skel.m4 data/skeletons/c++.m4 data/skeletons/c-like.m4 data/skeletons/c-skel.m4 data/skeletons/c.m4 data/skeletons/glr.c data/skeletons/glr.cc data/skeletons/glr2.cc data/skeletons/java-skel.m4 data/skeletons/java.m4 data/skeletons/lalr1.cc data/skeletons/lalr1.java data/skeletons/location.cc data/skeletons/stack.hh data/skeletons/traceon.m4 data/skeletons/variant.hh data/skeletons/yacc.c data/skeletons/d-skel.m4 data/skeletons/d.m4 data/skeletons/lalr1.d '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/skeletons' LC_ALL=C tests/bison --version >doc/bison.help.tmp - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' + /usr/bin/install -c -m 644 data/xslt/bison.xsl data/xslt/xml2dot.xsl data/xslt/xml2text.xsl data/xslt/xml2xhtml.xsl '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/bison/xslt' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' + /usr/bin/install -c -m 644 examples/c/rpcalc/rpcalc.y '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' /usr/bin/install -c -m 644 examples/c/mfcalc/calc.h examples/c/mfcalc/mfcalc.y '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/mfcalc' LC_ALL=C tests/bison --help | \ sed -e 's,^Usage: .*/bison \[OPTION\],Usage: bison [OPTION],g' \ -e '/translation bugs/d' >>doc/bison.help.tmp - /usr/bin/install -c -m 644 examples/c/rpcalc/rpcalc.y 
'/build/bison-3.8.2+dfsg/debian/tmp/usr/share/doc/bison/examples/c/rpcalc' ./build-aux/move-if-change doc/bison.help.tmp doc/bison.help - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/man/man1' - /bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/info' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/man/man1' + /usr/bin/mkdir -p '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/info' /usr/bin/install -c -m 644 ./doc/bison.1 doc/yacc.1 '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/man/man1' /usr/bin/install -c -m 644 ./doc/bison.info '/build/bison-3.8.2+dfsg/debian/tmp/usr/share/info' make[4]: Leaving directory '/build/bison-3.8.2+dfsg' @@ -271871,6 +271910,8 @@ dh_perl dh_link dh_strip_nondeterminism +Garbage at end of string in strptime: +02:00 at /usr/lib/x86_64-linux-gnu/perl/5.36/Time/Piece.pm line 598. +Perhaps a format flag did not match the actual input? at /usr/lib/x86_64-linux-gnu/perl/5.36/Time/Piece.pm line 598. Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/zh_TW/LC_MESSAGES/bison.mo @@ -271879,6 +271920,14 @@ Normalized debian/bison/usr/share/locale/zh_CN/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ta/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/sq/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/vi/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/uk/LC_MESSAGES/bison-runtime.mo @@ -271887,31 +271936,29 @@ Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/tr/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/th/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/ta/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/sv/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-gnulib.mo -Garbage at end of string in strptime: +02:00 at /usr/lib/x86_64-linux-gnu/perl/5.36/Time/Piece.pm line 598. -Perhaps a format flag did not match the actual input? at /usr/lib/x86_64-linux-gnu/perl/5.36/Time/Piece.pm line 598. 
- Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/be/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/ast/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/af/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/sr/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/sq/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/sl/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/sk/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/rw/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/be/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/ast/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/af/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ru/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/ro/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/nl/LC_MESSAGES/bison-runtime.mo @@ -271919,13 +271966,6 @@ Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/nb/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/pt_BR/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/pt/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/pl/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-gnulib.mo Normalized 
debian/bison/usr/share/locale/ms/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/ms/LC_MESSAGES/bison.mo @@ -271933,13 +271973,6 @@ Normalized debian/bison/usr/share/locale/lt/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/ky/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/ko/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/ia/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/ja/LC_MESSAGES/bison.mo @@ -271947,6 +271980,13 @@ Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/it/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/id/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/ia/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/hu/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/hr/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/gl/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/ga/LC_MESSAGES/bison-runtime.mo @@ -271954,6 +271994,21 @@ Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/fr/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/cs/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison.mo + Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/eu/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-gnulib.mo + Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-runtime.mo + Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison.mo Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/es/LC_MESSAGES/bison-runtime.mo Normalized 
debian/bison/usr/share/locale/es/LC_MESSAGES/bison.mo @@ -271966,22 +272021,6 @@ Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-gnulib.mo Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison-runtime.mo Normalized debian/bison/usr/share/locale/de/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/fi/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/eu/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/et/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/da/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/cs/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison-runtime.mo - Normalized debian/bison/usr/share/locale/ca/LC_MESSAGES/bison.mo - Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-gnulib.mo - Normalized debian/bison/usr/share/locale/bg/LC_MESSAGES/bison-runtime.mo dh_compress dh_fixperms dh_missing @@ -271993,8 +272032,8 @@ dh_gencontrol dh_md5sums dh_builddeb -dpkg-deb: building package 'bison-dbgsym' in '../bison-dbgsym_3.8.2+dfsg-1_amd64.deb'. dpkg-deb: building package 'libbison-dev' in '../libbison-dev_3.8.2+dfsg-1_amd64.deb'. +dpkg-deb: building package 'bison-dbgsym' in '../bison-dbgsym_3.8.2+dfsg-1_amd64.deb'. dpkg-deb: building package 'bison' in '../bison_3.8.2+dfsg-1_amd64.deb'. dpkg-genbuildinfo --build=binary -O../bison_3.8.2+dfsg-1_amd64.buildinfo dpkg-genchanges --build=binary -O../bison_3.8.2+dfsg-1_amd64.changes @@ -272003,12 +272042,14 @@ dpkg-buildpackage: info: binary-only upload (no source included) dpkg-genchanges: info: including full source code in upload I: copying local configuration +I: user script /srv/workspace/pbuilder/1905680/tmp/hooks/B01_cleanup starting +I: user script /srv/workspace/pbuilder/1905680/tmp/hooks/B01_cleanup finished I: unmounting dev/ptmx filesystem I: unmounting dev/pts filesystem I: unmounting dev/shm filesystem I: unmounting proc filesystem I: unmounting sys filesystem I: cleaning the build env -I: removing directory /srv/workspace/pbuilder/1623204 and its subdirectories -I: Current time: Tue May 21 12:02:48 -12 2024 -I: pbuilder-time-stamp: 1716336168 +I: removing directory /srv/workspace/pbuilder/1905680 and its subdirectories +I: Current time: Thu Apr 20 07:51:45 +14 2023 +I: pbuilder-time-stamp: 1681926705